target-sparc: Tidy gen_trap_ifnofpu interface
[qemu.git] / target-sparc / translate.c
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    int n_t32;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
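
/* Worked example (not from the original source): for a Bicc instruction,
   GET_FIELD(insn, 3, 6) extracts the 4-bit cond field, i.e. bits 28..25
   in the manuals' bit-0-is-2^0 numbering; GET_FIELD_SP(insn, 25, 28)
   names exactly the same bits. */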
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
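
/* For instance, sign_extend(0x1000, 13) yields -4096, since bit 12 is
   the sign bit of a 13-bit field such as a simm13 immediate. */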
#define IS_IMM (insn & (1<<13))

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
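
/* Note (added for clarity): in the V9 FPRS register, DL (dirty lower) is
   bit 0 and DU (dirty upper) is bit 1, so the constant above marks the
   lower half of the FP register file dirty for rd < 32 and the upper
   half otherwise. */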
/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}

static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
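
/* Note (added for clarity): direct TB chaining is only safe when both pc
   and npc stay on the same guest page as the TB itself, so that unmapping
   or rewriting that page invalidates the link; it is also disabled under
   single-stepping so control returns to the main loop after each TB. */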
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
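
/* Background (added for clarity): tagged arithmetic treats the low two
   bits of each operand as a type tag.  gen_tag_tv raises TT_TOVF when
   either tag is non-zero, while gen_add_tv/gen_sub_tv raise it on 32-bit
   signed overflow of the result; together they implement the trapping
   TADDccTV/TSUBccTV variants. */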
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
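
/* Note (added for clarity): ADDX adds in the icc carry bit.  The switch
   above recovers that carry without calling the helper whenever the
   operation that last set the condition codes is known at translation
   time. */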
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
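
/* Background (added for clarity): MULScc performs one step of the V8
   multiply-step algorithm -- the partial product and the Y register each
   shift right one bit per step -- so 32 consecutive MULScc instructions
   plus a final fixup compute a full 32x32-bit multiply in software. */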
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
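
/* Note (added for clarity): the 64-bit signed divide below must
   special-case the one overflowing quotient, INT64_MIN / -1, in addition
   to trapping on a zero divisor; a plain tcg_gen_div_i64 would not give
   a defined result for it on all hosts. */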
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
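
/* Background (added for clarity): SPARC exposes an architectural pc/npc
   pair because of delayed branches -- the instruction in the delay slot
   (at npc) executes before control reaches the branch target.  This is
   why the translator tracks both values, plus the two-valued JUMP_PC
   case for conditional branches resolved only at run time. */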
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
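
/* A minimal usage sketch (hypothetical caller, not part of this file):
   decoder paths that emit FP operations bail out of the instruction
   when the trap was generated, along the lines of

       if (gen_trap_ifnofpu(dc)) {
           goto jmp_insn;
       }
*/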
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2236 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2237 int width, bool cc, bool left)
2239 TCGv lo1, lo2, t1, t2;
2240 uint64_t amask, tabl, tabr;
2241 int shift, imask, omask;
2243 if (cc) {
2244 tcg_gen_mov_tl(cpu_cc_src, s1);
2245 tcg_gen_mov_tl(cpu_cc_src2, s2);
2246 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2247 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2248 dc->cc_op = CC_OP_SUB;
2251 /* Theory of operation: there are two tables, left and right (not to
2252 be confused with the left and right versions of the opcode). These
2253 are indexed by the low 3 bits of the inputs. To make things "easy",
2254 these tables are loaded into two constants, TABL and TABR below.
2255 The operation index = (input & imask) << shift calculates the index
2256 into the constant, while val = (table >> index) & omask calculates
2257 the value we're looking for. */
2258 switch (width) {
2259 case 8:
2260 imask = 0x7;
2261 shift = 3;
2262 omask = 0xff;
2263 if (left) {
2264 tabl = 0x80c0e0f0f8fcfeffULL;
2265 tabr = 0xff7f3f1f0f070301ULL;
2266 } else {
2267 tabl = 0x0103070f1f3f7fffULL;
2268 tabr = 0xfffefcf8f0e0c080ULL;
2270 break;
2271 case 16:
2272 imask = 0x6;
2273 shift = 1;
2274 omask = 0xf;
2275 if (left) {
2276 tabl = 0x8cef;
2277 tabr = 0xf731;
2278 } else {
2279 tabl = 0x137f;
2280 tabr = 0xfec8;
2282 break;
2283 case 32:
2284 imask = 0x4;
2285 shift = 0;
2286 omask = 0x3;
2287 if (left) {
2288 tabl = (2 << 2) | 3;
2289 tabr = (3 << 2) | 1;
2290 } else {
2291 tabl = (1 << 2) | 3;
2292 tabr = (3 << 2) | 2;
2294 break;
2295 default:
2296 abort();
2299 lo1 = tcg_temp_new();
2300 lo2 = tcg_temp_new();
2301 tcg_gen_andi_tl(lo1, s1, imask);
2302 tcg_gen_andi_tl(lo2, s2, imask);
2303 tcg_gen_shli_tl(lo1, lo1, shift);
2304 tcg_gen_shli_tl(lo2, lo2, shift);
2306 t1 = tcg_const_tl(tabl);
2307 t2 = tcg_const_tl(tabr);
2308 tcg_gen_shr_tl(lo1, t1, lo1);
2309 tcg_gen_shr_tl(lo2, t2, lo2);
2310 tcg_gen_andi_tl(dst, lo1, omask);
2311 tcg_gen_andi_tl(lo2, lo2, omask);
2313 amask = -8;
2314 if (AM_CHECK(dc)) {
2315 amask &= 0xffffffffULL;
2317 tcg_gen_andi_tl(s1, s1, amask);
2318 tcg_gen_andi_tl(s2, s2, amask);
2320 /* We want to compute
2321 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2322 We've already done dst = lo1, so this reduces to
2323 dst &= (s1 == s2 ? -1 : lo2)
2324 Which we perform by
2325 lo2 |= -(s1 == s2)
2326 dst &= lo2
2328 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2329 tcg_gen_neg_tl(t1, t1);
2330 tcg_gen_or_tl(lo2, lo2, t1);
2331 tcg_gen_and_tl(dst, dst, lo2);
2333 tcg_temp_free(lo1);
2334 tcg_temp_free(lo2);
2335 tcg_temp_free(t1);
2336 tcg_temp_free(t2);
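/* Worked example (edge8, the "left" variant): for s1 % 8 == 2 the left
   table yields 0xfc -- the two lanes before the starting byte are
   masked off; for s2 % 8 == 5 the right table yields 0x3f -- the two
   lanes past the ending byte are masked off.  When both addresses fall
   in the same 8-byte group, dst = 0xfc & 0x3f = 0x3c, i.e. only the
   four lanes between the two edges survive. */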
2339 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2341 TCGv tmp = tcg_temp_new();
2343 tcg_gen_add_tl(tmp, s1, s2);
2344 tcg_gen_andi_tl(dst, tmp, -8);
2345 if (left) {
2346 tcg_gen_neg_tl(tmp, tmp);
2348 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2350 tcg_temp_free(tmp);
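/* Example: s1 + s2 == 0x1003.  alignaddr writes 0x1000 to dst and
   latches 3 into GSR.align via the deposit; alignaddrl (left == true)
   latches (-0x1003) & 7 == 5 instead, the value the little-endian
   faligndata variant expects.  (Illustrative addresses only.) */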
2353 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2355 TCGv t1, t2, shift;
2357 t1 = tcg_temp_new();
2358 t2 = tcg_temp_new();
2359 shift = tcg_temp_new();
2361 tcg_gen_andi_tl(shift, gsr, 7);
2362 tcg_gen_shli_tl(shift, shift, 3);
2363 tcg_gen_shl_tl(t1, s1, shift);
2365 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2366 shift of (up to 63) followed by a constant shift of 1. */
2367 tcg_gen_xori_tl(shift, shift, 63);
2368 tcg_gen_shr_tl(t2, s2, shift);
2369 tcg_gen_shri_tl(t2, t2, 1);
2371 tcg_gen_or_tl(dst, t1, t2);
2373 tcg_temp_free(t1);
2374 tcg_temp_free(t2);
2375 tcg_temp_free(shift);
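/* Example: with GSR.align == 3 the bit shift is 24, so the code
   computes dst = (s1 << 24) | (s2 >> 40).  The xori with 63 plus the
   extra shri by 1 yields s2 >> (64 - 24) without ever emitting a shift
   by 64; for align == 0 this degenerates to dst = s1, because
   (s2 >> 63) >> 1 == 0. */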
2377 #endif
2379 #define CHECK_IU_FEATURE(dc, FEATURE) \
2380 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2381 goto illegal_insn;
2382 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2383 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2384 goto nfpu_insn;
2386 /* before an instruction, dc->pc must be static */
2387 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2389 unsigned int opc, rs1, rs2, rd;
2390 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2391 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2392 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2393 target_long simm;
2395 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2396 tcg_gen_debug_insn_start(dc->pc);
2399 opc = GET_FIELD(insn, 0, 1);
2401 rd = GET_FIELD(insn, 2, 6);
2403 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2404 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2406 switch (opc) {
2407 case 0: /* branches/sethi */
2409 unsigned int xop = GET_FIELD(insn, 7, 9);
2410 int32_t target;
2411 switch (xop) {
2412 #ifdef TARGET_SPARC64
2413 case 0x1: /* V9 BPcc */
2415 int cc;
2417 target = GET_FIELD_SP(insn, 0, 18);
2418 target = sign_extend(target, 19);
2419 target <<= 2;
2420 cc = GET_FIELD_SP(insn, 20, 21);
2421 if (cc == 0)
2422 do_branch(dc, target, insn, 0);
2423 else if (cc == 2)
2424 do_branch(dc, target, insn, 1);
2425 else
2426 goto illegal_insn;
2427 goto jmp_insn;
2429 case 0x3: /* V9 BPr */
2431 target = GET_FIELD_SP(insn, 0, 13) |
2432 (GET_FIELD_SP(insn, 20, 21) << 14);
2433 target = sign_extend(target, 16);
2434 target <<= 2;
2435 cpu_src1 = get_src1(insn, cpu_src1);
2436 do_branch_reg(dc, target, insn, cpu_src1);
2437 goto jmp_insn;
2439 case 0x5: /* V9 FBPcc */
2441 int cc = GET_FIELD_SP(insn, 20, 21);
2442 if (gen_trap_ifnofpu(dc)) {
2443 goto jmp_insn;
2445 target = GET_FIELD_SP(insn, 0, 18);
2446 target = sign_extend(target, 19);
2447 target <<= 2;
2448 do_fbranch(dc, target, insn, cc);
2449 goto jmp_insn;
2451 #else
2452 case 0x7: /* CBN+x */
2454 goto ncp_insn;
2456 #endif
2457 case 0x2: /* BN+x */
2459 target = GET_FIELD(insn, 10, 31);
2460 target = sign_extend(target, 22);
2461 target <<= 2;
2462 do_branch(dc, target, insn, 0);
2463 goto jmp_insn;
2465 case 0x6: /* FBN+x */
2467 if (gen_trap_ifnofpu(dc)) {
2468 goto jmp_insn;
2470 target = GET_FIELD(insn, 10, 31);
2471 target = sign_extend(target, 22);
2472 target <<= 2;
2473 do_fbranch(dc, target, insn, 0);
2474 goto jmp_insn;
2476 case 0x4: /* SETHI */
2477 if (rd) { /* sethi with rd == 0 is the canonical nop */
2478 uint32_t value = GET_FIELD(insn, 10, 31);
2479 TCGv r_const;
2481 r_const = tcg_const_tl(value << 10);
2482 gen_movl_TN_reg(rd, r_const);
2483 tcg_temp_free(r_const);
2485 break;
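/* e.g. "sethi %hi(0x12345678), %l0": imm22 == 0x12345678 >> 10, so the
   move above writes 0x12345400 -- the low 10 bits always end up 0. */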
2486 case 0x0: /* UNIMPL */
2487 default:
2488 goto illegal_insn;
2490 break;
2492 break;
2493 case 1: /*CALL*/
2495 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2496 TCGv r_const;
2498 r_const = tcg_const_tl(dc->pc);
2499 gen_movl_TN_reg(15, r_const);
2500 tcg_temp_free(r_const);
2501 target += dc->pc;
2502 gen_mov_pc_npc(dc, cpu_cond);
2503 #ifdef TARGET_SPARC64
2504 if (unlikely(AM_CHECK(dc))) {
2505 target &= 0xffffffffULL;
2507 #endif
2508 dc->npc = target;
2510 goto jmp_insn;
2511 case 2: /* FPU & Logical Operations */
2513 unsigned int xop = GET_FIELD(insn, 7, 12);
2514 if (xop == 0x3a) { /* generate trap */
2515 int cond;
2517 cpu_src1 = get_src1(insn, cpu_src1);
2518 if (IS_IMM) {
2519 rs2 = GET_FIELD(insn, 25, 31);
2520 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2521 } else {
2522 rs2 = GET_FIELD(insn, 27, 31);
2523 if (rs2 != 0) {
2524 gen_movl_reg_TN(rs2, cpu_src2);
2525 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2526 } else
2527 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2530 cond = GET_FIELD(insn, 3, 6);
2531 if (cond == 0x8) { /* Trap Always */
2532 save_state(dc, cpu_cond);
2533 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2534 supervisor(dc))
2535 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2536 else
2537 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2538 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2539 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2540 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2542 } else if (cond != 0) {
2543 TCGv r_cond = tcg_temp_new();
2544 int l1;
2545 #ifdef TARGET_SPARC64
2546 /* V9 icc/xcc */
2547 int cc = GET_FIELD_SP(insn, 11, 12);
2549 save_state(dc, cpu_cond);
2550 if (cc == 0)
2551 gen_cond(r_cond, 0, cond, dc);
2552 else if (cc == 2)
2553 gen_cond(r_cond, 1, cond, dc);
2554 else
2555 goto illegal_insn;
2556 #else
2557 save_state(dc, cpu_cond);
2558 gen_cond(r_cond, 0, cond, dc);
2559 #endif
2560 l1 = gen_new_label();
2561 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2563 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2564 supervisor(dc))
2565 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2566 else
2567 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2568 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2569 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2570 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2572 gen_set_label(l1);
2573 tcg_temp_free(r_cond);
2575 gen_op_next_insn();
2576 tcg_gen_exit_tb(0);
2577 dc->is_br = 1;
2578 goto jmp_insn;
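/* Trap-number sketch for the Tcc code above:
       tt = TT_TRAP + ((rs1 + rs2_or_imm) & mask)
   with mask = V8_TRAP_MASK (0x7f) normally, or UA2005_HTRAP_MASK (0xff)
   for a supervisor on a HYPV-capable CPU; e.g. "ta 0x10" raises
   software trap number 0x10 (assuming TT_TRAP is the software-trap
   base, 0x80 on SPARCv8). */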
2579 } else if (xop == 0x28) {
2580 rs1 = GET_FIELD(insn, 13, 17);
2581 switch(rs1) {
2582 case 0: /* rdy */
2583 #ifndef TARGET_SPARC64
2584 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2585 manual, rdy on the microSPARC
2586 II */
2587 case 0x0f: /* stbar in the SPARCv8 manual,
2588 rdy on the microSPARC II */
2589 case 0x10 ... 0x1f: /* implementation-dependent in the
2590 SPARCv8 manual, rdy on the
2591 microSPARC II */
2592 /* Read Asr17 */
2593 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2594 TCGv r_const;
2596 /* Read Asr17 for a Leon3 monoprocessor */
2597 r_const = tcg_const_tl((1 << 8)
2598 | (dc->def->nwindows - 1));
2599 gen_movl_TN_reg(rd, r_const);
2600 tcg_temp_free(r_const);
2601 break;
2603 #endif
2604 gen_movl_TN_reg(rd, cpu_y);
2605 break;
2606 #ifdef TARGET_SPARC64
2607 case 0x2: /* V9 rdccr */
2608 gen_helper_compute_psr(cpu_env);
2609 gen_helper_rdccr(cpu_dst, cpu_env);
2610 gen_movl_TN_reg(rd, cpu_dst);
2611 break;
2612 case 0x3: /* V9 rdasi */
2613 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2614 gen_movl_TN_reg(rd, cpu_dst);
2615 break;
2616 case 0x4: /* V9 rdtick */
2618 TCGv_ptr r_tickptr;
2620 r_tickptr = tcg_temp_new_ptr();
2621 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2622 offsetof(CPUSPARCState, tick));
2623 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2624 tcg_temp_free_ptr(r_tickptr);
2625 gen_movl_TN_reg(rd, cpu_dst);
2627 break;
2628 case 0x5: /* V9 rdpc */
2630 TCGv r_const;
2632 if (unlikely(AM_CHECK(dc))) {
2633 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2634 } else {
2635 r_const = tcg_const_tl(dc->pc);
2637 gen_movl_TN_reg(rd, r_const);
2638 tcg_temp_free(r_const);
2640 break;
2641 case 0x6: /* V9 rdfprs */
2642 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2643 gen_movl_TN_reg(rd, cpu_dst);
2644 break;
2645 case 0xf: /* V9 membar */
2646 break; /* no effect */
2647 case 0x13: /* Graphics Status */
2648 if (gen_trap_ifnofpu(dc)) {
2649 goto jmp_insn;
2651 gen_movl_TN_reg(rd, cpu_gsr);
2652 break;
2653 case 0x16: /* Softint */
2654 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2655 gen_movl_TN_reg(rd, cpu_dst);
2656 break;
2657 case 0x17: /* Tick compare */
2658 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2659 break;
2660 case 0x18: /* System tick */
2662 TCGv_ptr r_tickptr;
2664 r_tickptr = tcg_temp_new_ptr();
2665 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2666 offsetof(CPUSPARCState, stick));
2667 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2668 tcg_temp_free_ptr(r_tickptr);
2669 gen_movl_TN_reg(rd, cpu_dst);
2671 break;
2672 case 0x19: /* System tick compare */
2673 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2674 break;
2675 case 0x10: /* Performance Control */
2676 case 0x11: /* Performance Instrumentation Counter */
2677 case 0x12: /* Dispatch Control */
2678 case 0x14: /* Softint set, WO */
2679 case 0x15: /* Softint clear, WO */
2680 #endif
2681 default:
2682 goto illegal_insn;
2684 #if !defined(CONFIG_USER_ONLY)
2685 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2686 #ifndef TARGET_SPARC64
2687 if (!supervisor(dc))
2688 goto priv_insn;
2689 gen_helper_compute_psr(cpu_env);
2690 dc->cc_op = CC_OP_FLAGS;
2691 gen_helper_rdpsr(cpu_dst, cpu_env);
2692 #else
2693 CHECK_IU_FEATURE(dc, HYPV);
2694 if (!hypervisor(dc))
2695 goto priv_insn;
2696 rs1 = GET_FIELD(insn, 13, 17);
2697 switch (rs1) {
2698 case 0: // hpstate
2699 // gen_op_rdhpstate();
2700 break;
2701 case 1: // htstate
2702 // gen_op_rdhtstate();
2703 break;
2704 case 3: // hintp
2705 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2706 break;
2707 case 5: // htba
2708 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2709 break;
2710 case 6: // hver
2711 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2712 break;
2713 case 31: // hstick_cmpr
2714 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2715 break;
2716 default:
2717 goto illegal_insn;
2719 #endif
2720 gen_movl_TN_reg(rd, cpu_dst);
2721 break;
2722 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2723 if (!supervisor(dc))
2724 goto priv_insn;
2725 #ifdef TARGET_SPARC64
2726 rs1 = GET_FIELD(insn, 13, 17);
2727 switch (rs1) {
2728 case 0: // tpc
2730 TCGv_ptr r_tsptr;
2732 r_tsptr = tcg_temp_new_ptr();
2733 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2734 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2735 offsetof(trap_state, tpc));
2736 tcg_temp_free_ptr(r_tsptr);
2738 break;
2739 case 1: // tnpc
2741 TCGv_ptr r_tsptr;
2743 r_tsptr = tcg_temp_new_ptr();
2744 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2745 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2746 offsetof(trap_state, tnpc));
2747 tcg_temp_free_ptr(r_tsptr);
2749 break;
2750 case 2: // tstate
2752 TCGv_ptr r_tsptr;
2754 r_tsptr = tcg_temp_new_ptr();
2755 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2756 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2757 offsetof(trap_state, tstate));
2758 tcg_temp_free_ptr(r_tsptr);
2760 break;
2761 case 3: // tt
2763 TCGv_ptr r_tsptr;
2765 r_tsptr = tcg_temp_new_ptr();
2766 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2767 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2768 offsetof(trap_state, tt));
2769 tcg_temp_free_ptr(r_tsptr);
2770 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2772 break;
2773 case 4: // tick
2775 TCGv_ptr r_tickptr;
2777 r_tickptr = tcg_temp_new_ptr();
2778 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2779 offsetof(CPUSPARCState, tick));
2780 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2781 gen_movl_TN_reg(rd, cpu_tmp0);
2782 tcg_temp_free_ptr(r_tickptr);
2784 break;
2785 case 5: // tba
2786 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2787 break;
2788 case 6: // pstate
2789 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2790 offsetof(CPUSPARCState, pstate));
2791 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2792 break;
2793 case 7: // tl
2794 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2795 offsetof(CPUSPARCState, tl));
2796 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2797 break;
2798 case 8: // pil
2799 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2800 offsetof(CPUSPARCState, psrpil));
2801 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2802 break;
2803 case 9: // cwp
2804 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2805 break;
2806 case 10: // cansave
2807 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2808 offsetof(CPUSPARCState, cansave));
2809 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2810 break;
2811 case 11: // canrestore
2812 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2813 offsetof(CPUSPARCState, canrestore));
2814 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2815 break;
2816 case 12: // cleanwin
2817 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2818 offsetof(CPUSPARCState, cleanwin));
2819 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2820 break;
2821 case 13: // otherwin
2822 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2823 offsetof(CPUSPARCState, otherwin));
2824 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2825 break;
2826 case 14: // wstate
2827 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2828 offsetof(CPUSPARCState, wstate));
2829 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2830 break;
2831 case 16: // UA2005 gl
2832 CHECK_IU_FEATURE(dc, GL);
2833 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2834 offsetof(CPUSPARCState, gl));
2835 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2836 break;
2837 case 26: // UA2005 strand status
2838 CHECK_IU_FEATURE(dc, HYPV);
2839 if (!hypervisor(dc))
2840 goto priv_insn;
2841 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2842 break;
2843 case 31: // ver
2844 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2845 break;
2846 case 15: // fq
2847 default:
2848 goto illegal_insn;
2850 #else
2851 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2852 #endif
2853 gen_movl_TN_reg(rd, cpu_tmp0);
2854 break;
2855 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2856 #ifdef TARGET_SPARC64
2857 save_state(dc, cpu_cond);
2858 gen_helper_flushw(cpu_env);
2859 #else
2860 if (!supervisor(dc))
2861 goto priv_insn;
2862 gen_movl_TN_reg(rd, cpu_tbr);
2863 #endif
2864 break;
2865 #endif
2866 } else if (xop == 0x34) { /* FPU Operations */
2867 if (gen_trap_ifnofpu(dc)) {
2868 goto jmp_insn;
2870 gen_op_clear_ieee_excp_and_FTT();
2871 rs1 = GET_FIELD(insn, 13, 17);
2872 rs2 = GET_FIELD(insn, 27, 31);
2873 xop = GET_FIELD(insn, 18, 26);
2874 save_state(dc, cpu_cond);
2875 switch (xop) {
2876 case 0x1: /* fmovs */
2877 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2878 gen_store_fpr_F(dc, rd, cpu_src1_32);
2879 break;
2880 case 0x5: /* fnegs */
2881 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2882 break;
2883 case 0x9: /* fabss */
2884 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2885 break;
2886 case 0x29: /* fsqrts */
2887 CHECK_FPU_FEATURE(dc, FSQRT);
2888 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2889 break;
2890 case 0x2a: /* fsqrtd */
2891 CHECK_FPU_FEATURE(dc, FSQRT);
2892 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2893 break;
2894 case 0x2b: /* fsqrtq */
2895 CHECK_FPU_FEATURE(dc, FLOAT128);
2896 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2897 break;
2898 case 0x41: /* fadds */
2899 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2900 break;
2901 case 0x42: /* faddd */
2902 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2903 break;
2904 case 0x43: /* faddq */
2905 CHECK_FPU_FEATURE(dc, FLOAT128);
2906 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2907 break;
2908 case 0x45: /* fsubs */
2909 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2910 break;
2911 case 0x46: /* fsubd */
2912 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2913 break;
2914 case 0x47: /* fsubq */
2915 CHECK_FPU_FEATURE(dc, FLOAT128);
2916 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2917 break;
2918 case 0x49: /* fmuls */
2919 CHECK_FPU_FEATURE(dc, FMUL);
2920 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2921 break;
2922 case 0x4a: /* fmuld */
2923 CHECK_FPU_FEATURE(dc, FMUL);
2924 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2925 break;
2926 case 0x4b: /* fmulq */
2927 CHECK_FPU_FEATURE(dc, FLOAT128);
2928 CHECK_FPU_FEATURE(dc, FMUL);
2929 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2930 break;
2931 case 0x4d: /* fdivs */
2932 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2933 break;
2934 case 0x4e: /* fdivd */
2935 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2936 break;
2937 case 0x4f: /* fdivq */
2938 CHECK_FPU_FEATURE(dc, FLOAT128);
2939 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2940 break;
2941 case 0x69: /* fsmuld */
2942 CHECK_FPU_FEATURE(dc, FSMULD);
2943 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2944 break;
2945 case 0x6e: /* fdmulq */
2946 CHECK_FPU_FEATURE(dc, FLOAT128);
2947 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2948 break;
2949 case 0xc4: /* fitos */
2950 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2951 break;
2952 case 0xc6: /* fdtos */
2953 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2954 break;
2955 case 0xc7: /* fqtos */
2956 CHECK_FPU_FEATURE(dc, FLOAT128);
2957 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2958 break;
2959 case 0xc8: /* fitod */
2960 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2961 break;
2962 case 0xc9: /* fstod */
2963 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2964 break;
2965 case 0xcb: /* fqtod */
2966 CHECK_FPU_FEATURE(dc, FLOAT128);
2967 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2968 break;
2969 case 0xcc: /* fitoq */
2970 CHECK_FPU_FEATURE(dc, FLOAT128);
2971 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2972 break;
2973 case 0xcd: /* fstoq */
2974 CHECK_FPU_FEATURE(dc, FLOAT128);
2975 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2976 break;
2977 case 0xce: /* fdtoq */
2978 CHECK_FPU_FEATURE(dc, FLOAT128);
2979 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2980 break;
2981 case 0xd1: /* fstoi */
2982 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2983 break;
2984 case 0xd2: /* fdtoi */
2985 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2986 break;
2987 case 0xd3: /* fqtoi */
2988 CHECK_FPU_FEATURE(dc, FLOAT128);
2989 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2990 break;
2991 #ifdef TARGET_SPARC64
2992 case 0x2: /* V9 fmovd */
2993 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2994 gen_store_fpr_D(dc, rd, cpu_src1_64);
2995 break;
2996 case 0x3: /* V9 fmovq */
2997 CHECK_FPU_FEATURE(dc, FLOAT128);
2998 gen_move_Q(rd, rs2);
2999 break;
3000 case 0x6: /* V9 fnegd */
3001 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3002 break;
3003 case 0x7: /* V9 fnegq */
3004 CHECK_FPU_FEATURE(dc, FLOAT128);
3005 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3006 break;
3007 case 0xa: /* V9 fabsd */
3008 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3009 break;
3010 case 0xb: /* V9 fabsq */
3011 CHECK_FPU_FEATURE(dc, FLOAT128);
3012 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3013 break;
3014 case 0x81: /* V9 fstox */
3015 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3016 break;
3017 case 0x82: /* V9 fdtox */
3018 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3019 break;
3020 case 0x83: /* V9 fqtox */
3021 CHECK_FPU_FEATURE(dc, FLOAT128);
3022 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3023 break;
3024 case 0x84: /* V9 fxtos */
3025 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3026 break;
3027 case 0x88: /* V9 fxtod */
3028 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3029 break;
3030 case 0x8c: /* V9 fxtoq */
3031 CHECK_FPU_FEATURE(dc, FLOAT128);
3032 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3033 break;
3034 #endif
3035 default:
3036 goto illegal_insn;
3038 } else if (xop == 0x35) { /* FPU Operations */
3039 #ifdef TARGET_SPARC64
3040 int cond;
3041 #endif
3042 if (gen_trap_ifnofpu(dc)) {
3043 goto jmp_insn;
3045 gen_op_clear_ieee_excp_and_FTT();
3046 rs1 = GET_FIELD(insn, 13, 17);
3047 rs2 = GET_FIELD(insn, 27, 31);
3048 xop = GET_FIELD(insn, 18, 26);
3049 save_state(dc, cpu_cond);
3050 #ifdef TARGET_SPARC64
3051 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
3052 int l1;
3054 l1 = gen_new_label();
3055 cond = GET_FIELD_SP(insn, 14, 17);
3056 cpu_src1 = get_src1(insn, cpu_src1);
3057 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3058 0, l1);
3059 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3060 gen_store_fpr_F(dc, rd, cpu_src1_32);
3061 gen_set_label(l1);
3062 break;
3063 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3064 int l1;
3066 l1 = gen_new_label();
3067 cond = GET_FIELD_SP(insn, 14, 17);
3068 cpu_src1 = get_src1(insn, cpu_src1);
3069 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3070 0, l1);
3071 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3072 gen_store_fpr_D(dc, rd, cpu_src1_64);
3073 gen_set_label(l1);
3074 break;
3075 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3076 int l1;
3078 CHECK_FPU_FEATURE(dc, FLOAT128);
3079 l1 = gen_new_label();
3080 cond = GET_FIELD_SP(insn, 14, 17);
3081 cpu_src1 = get_src1(insn, cpu_src1);
3082 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3083 0, l1);
3084 gen_move_Q(rd, rs2);
3085 gen_set_label(l1);
3086 break;
3088 #endif
3089 switch (xop) {
3090 #ifdef TARGET_SPARC64
3091 #define FMOVSCC(fcc) \
3093 TCGv r_cond; \
3094 int l1; \
3096 l1 = gen_new_label(); \
3097 r_cond = tcg_temp_new(); \
3098 cond = GET_FIELD_SP(insn, 14, 17); \
3099 gen_fcond(r_cond, fcc, cond); \
3100 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3101 0, l1); \
3102 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3103 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3104 gen_set_label(l1); \
3105 tcg_temp_free(r_cond); \
3107 #define FMOVDCC(fcc) \
3109 TCGv r_cond; \
3110 int l1; \
3112 l1 = gen_new_label(); \
3113 r_cond = tcg_temp_new(); \
3114 cond = GET_FIELD_SP(insn, 14, 17); \
3115 gen_fcond(r_cond, fcc, cond); \
3116 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3117 0, l1); \
3118 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3119 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3120 gen_set_label(l1); \
3121 tcg_temp_free(r_cond); \
3123 #define FMOVQCC(fcc) \
3125 TCGv r_cond; \
3126 int l1; \
3128 l1 = gen_new_label(); \
3129 r_cond = tcg_temp_new(); \
3130 cond = GET_FIELD_SP(insn, 14, 17); \
3131 gen_fcond(r_cond, fcc, cond); \
3132 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3133 0, l1); \
3134 gen_move_Q(rd, rs2); \
3135 gen_set_label(l1); \
3136 tcg_temp_free(r_cond); \
3138 case 0x001: /* V9 fmovscc %fcc0 */
3139 FMOVSCC(0);
3140 break;
3141 case 0x002: /* V9 fmovdcc %fcc0 */
3142 FMOVDCC(0);
3143 break;
3144 case 0x003: /* V9 fmovqcc %fcc0 */
3145 CHECK_FPU_FEATURE(dc, FLOAT128);
3146 FMOVQCC(0);
3147 break;
3148 case 0x041: /* V9 fmovscc %fcc1 */
3149 FMOVSCC(1);
3150 break;
3151 case 0x042: /* V9 fmovdcc %fcc1 */
3152 FMOVDCC(1);
3153 break;
3154 case 0x043: /* V9 fmovqcc %fcc1 */
3155 CHECK_FPU_FEATURE(dc, FLOAT128);
3156 FMOVQCC(1);
3157 break;
3158 case 0x081: /* V9 fmovscc %fcc2 */
3159 FMOVSCC(2);
3160 break;
3161 case 0x082: /* V9 fmovdcc %fcc2 */
3162 FMOVDCC(2);
3163 break;
3164 case 0x083: /* V9 fmovqcc %fcc2 */
3165 CHECK_FPU_FEATURE(dc, FLOAT128);
3166 FMOVQCC(2);
3167 break;
3168 case 0x0c1: /* V9 fmovscc %fcc3 */
3169 FMOVSCC(3);
3170 break;
3171 case 0x0c2: /* V9 fmovdcc %fcc3 */
3172 FMOVDCC(3);
3173 break;
3174 case 0x0c3: /* V9 fmovqcc %fcc3 */
3175 CHECK_FPU_FEATURE(dc, FLOAT128);
3176 FMOVQCC(3);
3177 break;
3178 #undef FMOVSCC
3179 #undef FMOVDCC
3180 #undef FMOVQCC
3181 #define FMOVSCC(icc) \
3183 TCGv r_cond; \
3184 int l1; \
3186 l1 = gen_new_label(); \
3187 r_cond = tcg_temp_new(); \
3188 cond = GET_FIELD_SP(insn, 14, 17); \
3189 gen_cond(r_cond, icc, cond, dc); \
3190 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3191 0, l1); \
3192 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3193 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3194 gen_set_label(l1); \
3195 tcg_temp_free(r_cond); \
3197 #define FMOVDCC(icc) \
3199 TCGv r_cond; \
3200 int l1; \
3202 l1 = gen_new_label(); \
3203 r_cond = tcg_temp_new(); \
3204 cond = GET_FIELD_SP(insn, 14, 17); \
3205 gen_cond(r_cond, icc, cond, dc); \
3206 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3207 0, l1); \
3208 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3209 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3210 gen_update_fprs_dirty(DFPREG(rd)); \
3211 gen_set_label(l1); \
3212 tcg_temp_free(r_cond); \
3214 #define FMOVQCC(icc) \
3216 TCGv r_cond; \
3217 int l1; \
3219 l1 = gen_new_label(); \
3220 r_cond = tcg_temp_new(); \
3221 cond = GET_FIELD_SP(insn, 14, 17); \
3222 gen_cond(r_cond, icc, cond, dc); \
3223 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3224 0, l1); \
3225 gen_move_Q(rd, rs2); \
3226 gen_set_label(l1); \
3227 tcg_temp_free(r_cond); \
3230 case 0x101: /* V9 fmovscc %icc */
3231 FMOVSCC(0);
3232 break;
3233 case 0x102: /* V9 fmovdcc %icc */
3234 FMOVDCC(0);
3235 break;
3236 case 0x103: /* V9 fmovqcc %icc */
3237 CHECK_FPU_FEATURE(dc, FLOAT128);
3238 FMOVQCC(0);
3239 break;
3240 case 0x181: /* V9 fmovscc %xcc */
3241 FMOVSCC(1);
3242 break;
3243 case 0x182: /* V9 fmovdcc %xcc */
3244 FMOVDCC(1);
3245 break;
3246 case 0x183: /* V9 fmovqcc %xcc */
3247 CHECK_FPU_FEATURE(dc, FLOAT128);
3248 FMOVQCC(1);
3249 break;
3250 #undef FMOVSCC
3251 #undef FMOVDCC
3252 #undef FMOVQCC
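/* All the FMOVcc macros above synthesize a conditional move from a
   branch: evaluate the condition, then jump past the register copy
   when it is false.  In sketch form:
       if (cond == 0) goto l1;
       fpr[rd] = fpr[rs2];
   l1: ;
 */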
3253 #endif
3254 case 0x51: /* fcmps, V9 %fcc */
3255 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3256 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3257 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3258 break;
3259 case 0x52: /* fcmpd, V9 %fcc */
3260 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3261 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3262 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3263 break;
3264 case 0x53: /* fcmpq, V9 %fcc */
3265 CHECK_FPU_FEATURE(dc, FLOAT128);
3266 gen_op_load_fpr_QT0(QFPREG(rs1));
3267 gen_op_load_fpr_QT1(QFPREG(rs2));
3268 gen_op_fcmpq(rd & 3);
3269 break;
3270 case 0x55: /* fcmpes, V9 %fcc */
3271 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3272 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3273 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3274 break;
3275 case 0x56: /* fcmped, V9 %fcc */
3276 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3277 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3278 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3279 break;
3280 case 0x57: /* fcmpeq, V9 %fcc */
3281 CHECK_FPU_FEATURE(dc, FLOAT128);
3282 gen_op_load_fpr_QT0(QFPREG(rs1));
3283 gen_op_load_fpr_QT1(QFPREG(rs2));
3284 gen_op_fcmpeq(rd & 3);
3285 break;
3286 default:
3287 goto illegal_insn;
3289 } else if (xop == 0x2) {
3290 // clr/mov shortcut
3292 rs1 = GET_FIELD(insn, 13, 17);
3293 if (rs1 == 0) {
3294 // or %g0, x, y -> mov x, y
3295 if (IS_IMM) { /* immediate */
3296 TCGv r_const;
3298 simm = GET_FIELDs(insn, 19, 31);
3299 r_const = tcg_const_tl(simm);
3300 gen_movl_TN_reg(rd, r_const);
3301 tcg_temp_free(r_const);
3302 } else { /* register */
3303 rs2 = GET_FIELD(insn, 27, 31);
3304 gen_movl_reg_TN(rs2, cpu_dst);
3305 gen_movl_TN_reg(rd, cpu_dst);
3307 } else {
3308 cpu_src1 = get_src1(insn, cpu_src1);
3309 if (IS_IMM) { /* immediate */
3310 simm = GET_FIELDs(insn, 19, 31);
3311 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3312 gen_movl_TN_reg(rd, cpu_dst);
3313 } else { /* register */
3314 // or x, %g0, y -> mov x, y
3315 rs2 = GET_FIELD(insn, 27, 31);
3316 if (rs2 != 0) {
3317 gen_movl_reg_TN(rs2, cpu_src2);
3318 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3319 gen_movl_TN_reg(rd, cpu_dst);
3320 } else
3321 gen_movl_TN_reg(rd, cpu_src1);
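/* The xop == 0x2 (or) paths double as SPARC's mov/clr idioms:
       clr %rd        ==  or %g0, %g0, %rd
       mov %rs2, %rd  ==  or %g0, %rs2, %rd
       mov imm, %rd   ==  or %g0, imm, %rd
   which is why rs1 == 0 gets the dedicated shortcut above. */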
3324 #ifdef TARGET_SPARC64
3325 } else if (xop == 0x25) { /* sll, V9 sllx */
3326 cpu_src1 = get_src1(insn, cpu_src1);
3327 if (IS_IMM) { /* immediate */
3328 simm = GET_FIELDs(insn, 20, 31);
3329 if (insn & (1 << 12)) {
3330 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3331 } else {
3332 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3334 } else { /* register */
3335 rs2 = GET_FIELD(insn, 27, 31);
3336 gen_movl_reg_TN(rs2, cpu_src2);
3337 if (insn & (1 << 12)) {
3338 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3339 } else {
3340 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3342 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3344 gen_movl_TN_reg(rd, cpu_dst);
3345 } else if (xop == 0x26) { /* srl, V9 srlx */
3346 cpu_src1 = get_src1(insn, cpu_src1);
3347 if (IS_IMM) { /* immediate */
3348 simm = GET_FIELDs(insn, 20, 31);
3349 if (insn & (1 << 12)) {
3350 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3351 } else {
3352 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3353 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3355 } else { /* register */
3356 rs2 = GET_FIELD(insn, 27, 31);
3357 gen_movl_reg_TN(rs2, cpu_src2);
3358 if (insn & (1 << 12)) {
3359 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3360 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3361 } else {
3362 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3363 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3364 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3367 gen_movl_TN_reg(rd, cpu_dst);
3368 } else if (xop == 0x27) { /* sra, V9 srax */
3369 cpu_src1 = get_src1(insn, cpu_src1);
3370 if (IS_IMM) { /* immediate */
3371 simm = GET_FIELDs(insn, 20, 31);
3372 if (insn & (1 << 12)) {
3373 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3374 } else {
3375 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3376 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3377 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3379 } else { /* register */
3380 rs2 = GET_FIELD(insn, 27, 31);
3381 gen_movl_reg_TN(rs2, cpu_src2);
3382 if (insn & (1 << 12)) {
3383 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3384 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3385 } else {
3386 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3387 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3388 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3389 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3392 gen_movl_TN_reg(rd, cpu_dst);
3393 #endif
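/* In the three V9 shift cases above, insn bit 12 (the "X" bit) selects
   the 64-bit form: the count is masked with 0x3f instead of 0x1f, while
   the 32-bit srl/sra first reduce rs1 to its low 32 bits (sign-extended
   for sra).  e.g. "srl %i0, 0, %l0" is the usual zero-extend idiom. */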
3394 } else if (xop < 0x36) {
3395 if (xop < 0x20) {
3396 cpu_src1 = get_src1(insn, cpu_src1);
3397 cpu_src2 = get_src2(insn, cpu_src2);
3398 switch (xop & ~0x10) {
3399 case 0x0: /* add */
3400 if (IS_IMM) {
3401 simm = GET_FIELDs(insn, 19, 31);
3402 if (xop & 0x10) {
3403 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3404 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3405 dc->cc_op = CC_OP_ADD;
3406 } else {
3407 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3409 } else {
3410 if (xop & 0x10) {
3411 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3412 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3413 dc->cc_op = CC_OP_ADD;
3414 } else {
3415 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3418 break;
3419 case 0x1: /* and */
3420 if (IS_IMM) {
3421 simm = GET_FIELDs(insn, 19, 31);
3422 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3423 } else {
3424 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3426 if (xop & 0x10) {
3427 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3428 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3429 dc->cc_op = CC_OP_LOGIC;
3431 break;
3432 case 0x2: /* or */
3433 if (IS_IMM) {
3434 simm = GET_FIELDs(insn, 19, 31);
3435 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3436 } else {
3437 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3439 if (xop & 0x10) {
3440 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3441 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3442 dc->cc_op = CC_OP_LOGIC;
3444 break;
3445 case 0x3: /* xor */
3446 if (IS_IMM) {
3447 simm = GET_FIELDs(insn, 19, 31);
3448 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3449 } else {
3450 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3452 if (xop & 0x10) {
3453 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3454 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3455 dc->cc_op = CC_OP_LOGIC;
3457 break;
3458 case 0x4: /* sub */
3459 if (IS_IMM) {
3460 simm = GET_FIELDs(insn, 19, 31);
3461 if (xop & 0x10) {
3462 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3463 } else {
3464 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3466 } else {
3467 if (xop & 0x10) {
3468 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3469 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3470 dc->cc_op = CC_OP_SUB;
3471 } else {
3472 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3475 break;
3476 case 0x5: /* andn */
3477 if (IS_IMM) {
3478 simm = GET_FIELDs(insn, 19, 31);
3479 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3480 } else {
3481 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3483 if (xop & 0x10) {
3484 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3485 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3486 dc->cc_op = CC_OP_LOGIC;
3488 break;
3489 case 0x6: /* orn */
3490 if (IS_IMM) {
3491 simm = GET_FIELDs(insn, 19, 31);
3492 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3493 } else {
3494 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3496 if (xop & 0x10) {
3497 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3498 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3499 dc->cc_op = CC_OP_LOGIC;
3501 break;
3502 case 0x7: /* xorn */
3503 if (IS_IMM) {
3504 simm = GET_FIELDs(insn, 19, 31);
3505 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3506 } else {
3507 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3508 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3510 if (xop & 0x10) {
3511 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3512 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3513 dc->cc_op = CC_OP_LOGIC;
3515 break;
3516 case 0x8: /* addx, V9 addc */
3517 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3518 (xop & 0x10));
3519 break;
3520 #ifdef TARGET_SPARC64
3521 case 0x9: /* V9 mulx */
3522 if (IS_IMM) {
3523 simm = GET_FIELDs(insn, 19, 31);
3524 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3525 } else {
3526 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3528 break;
3529 #endif
3530 case 0xa: /* umul */
3531 CHECK_IU_FEATURE(dc, MUL);
3532 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3533 if (xop & 0x10) {
3534 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3535 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3536 dc->cc_op = CC_OP_LOGIC;
3538 break;
3539 case 0xb: /* smul */
3540 CHECK_IU_FEATURE(dc, MUL);
3541 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3542 if (xop & 0x10) {
3543 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3544 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3545 dc->cc_op = CC_OP_LOGIC;
3547 break;
3548 case 0xc: /* subx, V9 subc */
3549 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3550 (xop & 0x10));
3551 break;
3552 #ifdef TARGET_SPARC64
3553 case 0xd: /* V9 udivx */
3555 TCGv r_temp1, r_temp2;
3556 r_temp1 = tcg_temp_local_new();
3557 r_temp2 = tcg_temp_local_new();
3558 tcg_gen_mov_tl(r_temp1, cpu_src1);
3559 tcg_gen_mov_tl(r_temp2, cpu_src2);
3560 gen_trap_ifdivzero_tl(r_temp2);
3561 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3562 tcg_temp_free(r_temp1);
3563 tcg_temp_free(r_temp2);
3565 break;
3566 #endif
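/* udivx copies its operands into *local* temps because
   gen_trap_ifdivzero_tl emits a branch, and plain TCG temps do not keep
   their values across a branch; locals survive at the cost of a stack
   slot. */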
3567 case 0xe: /* udiv */
3568 CHECK_IU_FEATURE(dc, DIV);
3569 if (xop & 0x10) {
3570 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3571 cpu_src2);
3572 dc->cc_op = CC_OP_DIV;
3573 } else {
3574 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3575 cpu_src2);
3577 break;
3578 case 0xf: /* sdiv */
3579 CHECK_IU_FEATURE(dc, DIV);
3580 if (xop & 0x10) {
3581 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3582 cpu_src2);
3583 dc->cc_op = CC_OP_DIV;
3584 } else {
3585 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3586 cpu_src2);
3588 break;
3589 default:
3590 goto illegal_insn;
3592 gen_movl_TN_reg(rd, cpu_dst);
3593 } else {
3594 cpu_src1 = get_src1(insn, cpu_src1);
3595 cpu_src2 = get_src2(insn, cpu_src2);
3596 switch (xop) {
3597 case 0x20: /* taddcc */
3598 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3599 gen_movl_TN_reg(rd, cpu_dst);
3600 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3601 dc->cc_op = CC_OP_TADD;
3602 break;
3603 case 0x21: /* tsubcc */
3604 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3605 gen_movl_TN_reg(rd, cpu_dst);
3606 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3607 dc->cc_op = CC_OP_TSUB;
3608 break;
3609 case 0x22: /* taddcctv */
3610 save_state(dc, cpu_cond);
3611 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3612 gen_movl_TN_reg(rd, cpu_dst);
3613 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3614 dc->cc_op = CC_OP_TADDTV;
3615 break;
3616 case 0x23: /* tsubcctv */
3617 save_state(dc, cpu_cond);
3618 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3619 gen_movl_TN_reg(rd, cpu_dst);
3620 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3621 dc->cc_op = CC_OP_TSUBTV;
3622 break;
3623 case 0x24: /* mulscc */
3624 gen_helper_compute_psr(cpu_env);
3625 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3626 gen_movl_TN_reg(rd, cpu_dst);
3627 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3628 dc->cc_op = CC_OP_ADD;
3629 break;
3630 #ifndef TARGET_SPARC64
3631 case 0x25: /* sll */
3632 if (IS_IMM) { /* immediate */
3633 simm = GET_FIELDs(insn, 20, 31);
3634 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3635 } else { /* register */
3636 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3637 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3639 gen_movl_TN_reg(rd, cpu_dst);
3640 break;
3641 case 0x26: /* srl */
3642 if (IS_IMM) { /* immediate */
3643 simm = GET_FIELDs(insn, 20, 31);
3644 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3645 } else { /* register */
3646 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3647 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3649 gen_movl_TN_reg(rd, cpu_dst);
3650 break;
3651 case 0x27: /* sra */
3652 if (IS_IMM) { /* immediate */
3653 simm = GET_FIELDs(insn, 20, 31);
3654 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3655 } else { /* register */
3656 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3657 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3659 gen_movl_TN_reg(rd, cpu_dst);
3660 break;
3661 #endif
3662 case 0x30:
3664 switch(rd) {
3665 case 0: /* wry */
3666 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3667 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3668 break;
3669 #ifndef TARGET_SPARC64
3670 case 0x01 ... 0x0f: /* undefined in the
3671 SPARCv8 manual, nop
3672 on the microSPARC
3673 II */
3674 case 0x10 ... 0x1f: /* implementation-dependent
3675 in the SPARCv8
3676 manual, nop on the
3677 microSPARC II */
3678 break;
3679 #else
3680 case 0x2: /* V9 wrccr */
3681 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3682 gen_helper_wrccr(cpu_env, cpu_dst);
3683 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3684 dc->cc_op = CC_OP_FLAGS;
3685 break;
3686 case 0x3: /* V9 wrasi */
3687 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3688 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3689 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3690 break;
3691 case 0x6: /* V9 wrfprs */
3692 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3693 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3694 save_state(dc, cpu_cond);
3695 gen_op_next_insn();
3696 tcg_gen_exit_tb(0);
3697 dc->is_br = 1;
3698 break;
3699 case 0xf: /* V9 sir, nop if user */
3700 #if !defined(CONFIG_USER_ONLY)
3701 if (supervisor(dc)) {
3702 ; // XXX
3704 #endif
3705 break;
3706 case 0x13: /* Graphics Status */
3707 if (gen_trap_ifnofpu(dc)) {
3708 goto jmp_insn;
3710 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3711 break;
3712 case 0x14: /* Softint set */
3713 if (!supervisor(dc))
3714 goto illegal_insn;
3715 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3716 gen_helper_set_softint(cpu_env, cpu_tmp64);
3717 break;
3718 case 0x15: /* Softint clear */
3719 if (!supervisor(dc))
3720 goto illegal_insn;
3721 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3722 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3723 break;
3724 case 0x16: /* Softint write */
3725 if (!supervisor(dc))
3726 goto illegal_insn;
3727 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3728 gen_helper_write_softint(cpu_env, cpu_tmp64);
3729 break;
3730 case 0x17: /* Tick compare */
3731 #if !defined(CONFIG_USER_ONLY)
3732 if (!supervisor(dc))
3733 goto illegal_insn;
3734 #endif
3736 TCGv_ptr r_tickptr;
3738 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3739 cpu_src2);
3740 r_tickptr = tcg_temp_new_ptr();
3741 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3742 offsetof(CPUSPARCState, tick));
3743 gen_helper_tick_set_limit(r_tickptr,
3744 cpu_tick_cmpr);
3745 tcg_temp_free_ptr(r_tickptr);
3747 break;
3748 case 0x18: /* System tick */
3749 #if !defined(CONFIG_USER_ONLY)
3750 if (!supervisor(dc))
3751 goto illegal_insn;
3752 #endif
3754 TCGv_ptr r_tickptr;
3756 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3757 cpu_src2);
3758 r_tickptr = tcg_temp_new_ptr();
3759 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3760 offsetof(CPUSPARCState, stick));
3761 gen_helper_tick_set_count(r_tickptr,
3762 cpu_dst);
3763 tcg_temp_free_ptr(r_tickptr);
3765 break;
3766 case 0x19: /* System tick compare */
3767 #if !defined(CONFIG_USER_ONLY)
3768 if (!supervisor(dc))
3769 goto illegal_insn;
3770 #endif
3772 TCGv_ptr r_tickptr;
3774 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3775 cpu_src2);
3776 r_tickptr = tcg_temp_new_ptr();
3777 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3778 offsetof(CPUSPARCState, stick));
3779 gen_helper_tick_set_limit(r_tickptr,
3780 cpu_stick_cmpr);
3781 tcg_temp_free_ptr(r_tickptr);
3783 break;
3785 case 0x10: /* Performance Control */
3786 case 0x11: /* Performance Instrumentation
3787 Counter */
3788 case 0x12: /* Dispatch Control */
3789 #endif
3790 default:
3791 goto illegal_insn;
3794 break;
3795 #if !defined(CONFIG_USER_ONLY)
3796 case 0x31: /* wrpsr, V9 saved, restored */
3798 if (!supervisor(dc))
3799 goto priv_insn;
3800 #ifdef TARGET_SPARC64
3801 switch (rd) {
3802 case 0:
3803 gen_helper_saved(cpu_env);
3804 break;
3805 case 1:
3806 gen_helper_restored(cpu_env);
3807 break;
3808 case 2: /* UA2005 allclean */
3809 case 3: /* UA2005 otherw */
3810 case 4: /* UA2005 normalw */
3811 case 5: /* UA2005 invalw */
3812 // XXX
3813 default:
3814 goto illegal_insn;
3816 #else
3817 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3818 gen_helper_wrpsr(cpu_env, cpu_dst);
3819 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3820 dc->cc_op = CC_OP_FLAGS;
3821 save_state(dc, cpu_cond);
3822 gen_op_next_insn();
3823 tcg_gen_exit_tb(0);
3824 dc->is_br = 1;
3825 #endif
3827 break;
3828 case 0x32: /* wrwim, V9 wrpr */
3830 if (!supervisor(dc))
3831 goto priv_insn;
3832 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3833 #ifdef TARGET_SPARC64
3834 switch (rd) {
3835 case 0: // tpc
3837 TCGv_ptr r_tsptr;
3839 r_tsptr = tcg_temp_new_ptr();
3840 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3841 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3842 offsetof(trap_state, tpc));
3843 tcg_temp_free_ptr(r_tsptr);
3845 break;
3846 case 1: // tnpc
3848 TCGv_ptr r_tsptr;
3850 r_tsptr = tcg_temp_new_ptr();
3851 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3852 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3853 offsetof(trap_state, tnpc));
3854 tcg_temp_free_ptr(r_tsptr);
3856 break;
3857 case 2: // tstate
3859 TCGv_ptr r_tsptr;
3861 r_tsptr = tcg_temp_new_ptr();
3862 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3863 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3864 offsetof(trap_state,
3865 tstate));
3866 tcg_temp_free_ptr(r_tsptr);
3868 break;
3869 case 3: // tt
3871 TCGv_ptr r_tsptr;
3873 r_tsptr = tcg_temp_new_ptr();
3874 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3875 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3876 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3877 offsetof(trap_state, tt));
3878 tcg_temp_free_ptr(r_tsptr);
3880 break;
3881 case 4: // tick
3883 TCGv_ptr r_tickptr;
3885 r_tickptr = tcg_temp_new_ptr();
3886 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3887 offsetof(CPUSPARCState, tick));
3888 gen_helper_tick_set_count(r_tickptr,
3889 cpu_tmp0);
3890 tcg_temp_free_ptr(r_tickptr);
3892 break;
3893 case 5: // tba
3894 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3895 break;
3896 case 6: // pstate
3898 TCGv r_tmp = tcg_temp_local_new();
3900 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3901 save_state(dc, cpu_cond);
3902 gen_helper_wrpstate(cpu_env, r_tmp);
3903 tcg_temp_free(r_tmp);
3904 dc->npc = DYNAMIC_PC;
3906 break;
3907 case 7: // tl
3909 TCGv r_tmp = tcg_temp_local_new();
3911 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3912 save_state(dc, cpu_cond);
3913 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3914 tcg_temp_free(r_tmp);
3915 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3916 offsetof(CPUSPARCState, tl));
3917 dc->npc = DYNAMIC_PC;
3919 break;
3920 case 8: // pil
3921 gen_helper_wrpil(cpu_env, cpu_tmp0);
3922 break;
3923 case 9: // cwp
3924 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3925 break;
3926 case 10: // cansave
3927 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3928 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3929 offsetof(CPUSPARCState,
3930 cansave));
3931 break;
3932 case 11: // canrestore
3933 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3934 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3935 offsetof(CPUSPARCState,
3936 canrestore));
3937 break;
3938 case 12: // cleanwin
3939 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3940 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3941 offsetof(CPUSPARCState,
3942 cleanwin));
3943 break;
3944 case 13: // otherwin
3945 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3946 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3947 offsetof(CPUSPARCState,
3948 otherwin));
3949 break;
3950 case 14: // wstate
3951 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3952 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3953 offsetof(CPUSPARCState,
3954 wstate));
3955 break;
3956 case 16: // UA2005 gl
3957 CHECK_IU_FEATURE(dc, GL);
3958 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3959 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3960 offsetof(CPUSPARCState, gl));
3961 break;
3962 case 26: // UA2005 strand status
3963 CHECK_IU_FEATURE(dc, HYPV);
3964 if (!hypervisor(dc))
3965 goto priv_insn;
3966 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3967 break;
3968 default:
3969 goto illegal_insn;
3971 #else
3972 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3973 if (dc->def->nwindows != 32)
3974 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3975 (1 << dc->def->nwindows) - 1);
3976 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3977 #endif
3979 break;
3980 case 0x33: /* wrtbr, UA2005 wrhpr */
3982 #ifndef TARGET_SPARC64
3983 if (!supervisor(dc))
3984 goto priv_insn;
3985 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3986 #else
3987 CHECK_IU_FEATURE(dc, HYPV);
3988 if (!hypervisor(dc))
3989 goto priv_insn;
3990 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3991 switch (rd) {
3992 case 0: // hpstate
3993 // XXX gen_op_wrhpstate();
3994 save_state(dc, cpu_cond);
3995 gen_op_next_insn();
3996 tcg_gen_exit_tb(0);
3997 dc->is_br = 1;
3998 break;
3999 case 1: // htstate
4000 // XXX gen_op_wrhtstate();
4001 break;
4002 case 3: // hintp
4003 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4004 break;
4005 case 5: // htba
4006 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4007 break;
4008 case 31: // hstick_cmpr
4010 TCGv_ptr r_tickptr;
4012 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4013 r_tickptr = tcg_temp_new_ptr();
4014 tcg_gen_ld_ptr(r_tickptr, cpu_env,
4015 offsetof(CPUSPARCState, hstick));
4016 gen_helper_tick_set_limit(r_tickptr,
4017 cpu_hstick_cmpr);
4018 tcg_temp_free_ptr(r_tickptr);
4020 break;
4021 case 6: // hver readonly
4022 default:
4023 goto illegal_insn;
4025 #endif
4027 break;
4028 #endif
4029 #ifdef TARGET_SPARC64
4030 case 0x2c: /* V9 movcc */
4032 int cc = GET_FIELD_SP(insn, 11, 12);
4033 int cond = GET_FIELD_SP(insn, 14, 17);
4034 TCGv r_cond;
4035 int l1;
4037 r_cond = tcg_temp_new();
4038 if (insn & (1 << 18)) {
4039 if (cc == 0)
4040 gen_cond(r_cond, 0, cond, dc);
4041 else if (cc == 2)
4042 gen_cond(r_cond, 1, cond, dc);
4043 else
4044 goto illegal_insn;
4045 } else {
4046 gen_fcond(r_cond, cc, cond);
4049 l1 = gen_new_label();
4051 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
4052 if (IS_IMM) { /* immediate */
4053 TCGv r_const;
4055 simm = GET_FIELD_SPs(insn, 0, 10);
4056 r_const = tcg_const_tl(simm);
4057 gen_movl_TN_reg(rd, r_const);
4058 tcg_temp_free(r_const);
4059 } else {
4060 rs2 = GET_FIELD_SP(insn, 0, 4);
4061 gen_movl_reg_TN(rs2, cpu_tmp0);
4062 gen_movl_TN_reg(rd, cpu_tmp0);
4064 gen_set_label(l1);
4065 tcg_temp_free(r_cond);
4066 break;
4068 case 0x2d: /* V9 sdivx */
4069 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
4070 gen_movl_TN_reg(rd, cpu_dst);
4071 break;
4072 case 0x2e: /* V9 popc */
4074 cpu_src2 = get_src2(insn, cpu_src2);
4075 gen_helper_popc(cpu_dst, cpu_src2);
4076 gen_movl_TN_reg(rd, cpu_dst);
break; /* do not fall through into the movr case */
4078 case 0x2f: /* V9 movr */
4080 int cond = GET_FIELD_SP(insn, 10, 12);
4081 int l1;
4083 cpu_src1 = get_src1(insn, cpu_src1);
4085 l1 = gen_new_label();
4087 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
4088 cpu_src1, 0, l1);
4089 if (IS_IMM) { /* immediate */
4090 TCGv r_const;
4092 simm = GET_FIELD_SPs(insn, 0, 9);
4093 r_const = tcg_const_tl(simm);
4094 gen_movl_TN_reg(rd, r_const);
4095 tcg_temp_free(r_const);
4096 } else {
4097 rs2 = GET_FIELD_SP(insn, 0, 4);
4098 gen_movl_reg_TN(rs2, cpu_tmp0);
4099 gen_movl_TN_reg(rd, cpu_tmp0);
4101 gen_set_label(l1);
4102 break;
4104 #endif
4105 default:
4106 goto illegal_insn;
4109 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4110 #ifdef TARGET_SPARC64
4111 int opf = GET_FIELD_SP(insn, 5, 13);
4112 rs1 = GET_FIELD(insn, 13, 17);
4113 rs2 = GET_FIELD(insn, 27, 31);
4114 if (gen_trap_ifnofpu(dc)) {
4115 goto jmp_insn;
4118 switch (opf) {
4119 case 0x000: /* VIS I edge8cc */
4120 CHECK_FPU_FEATURE(dc, VIS1);
4121 gen_movl_reg_TN(rs1, cpu_src1);
4122 gen_movl_reg_TN(rs2, cpu_src2);
4123 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4124 gen_movl_TN_reg(rd, cpu_dst);
4125 break;
4126 case 0x001: /* VIS II edge8n */
4127 CHECK_FPU_FEATURE(dc, VIS2);
4128 gen_movl_reg_TN(rs1, cpu_src1);
4129 gen_movl_reg_TN(rs2, cpu_src2);
4130 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4131 gen_movl_TN_reg(rd, cpu_dst);
4132 break;
4133 case 0x002: /* VIS I edge8lcc */
4134 CHECK_FPU_FEATURE(dc, VIS1);
4135 gen_movl_reg_TN(rs1, cpu_src1);
4136 gen_movl_reg_TN(rs2, cpu_src2);
4137 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4138 gen_movl_TN_reg(rd, cpu_dst);
4139 break;
4140 case 0x003: /* VIS II edge8ln */
4141 CHECK_FPU_FEATURE(dc, VIS2);
4142 gen_movl_reg_TN(rs1, cpu_src1);
4143 gen_movl_reg_TN(rs2, cpu_src2);
4144 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4145 gen_movl_TN_reg(rd, cpu_dst);
4146 break;
4147 case 0x004: /* VIS I edge16cc */
4148 CHECK_FPU_FEATURE(dc, VIS1);
4149 gen_movl_reg_TN(rs1, cpu_src1);
4150 gen_movl_reg_TN(rs2, cpu_src2);
4151 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4152 gen_movl_TN_reg(rd, cpu_dst);
4153 break;
4154 case 0x005: /* VIS II edge16n */
4155 CHECK_FPU_FEATURE(dc, VIS2);
4156 gen_movl_reg_TN(rs1, cpu_src1);
4157 gen_movl_reg_TN(rs2, cpu_src2);
4158 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4159 gen_movl_TN_reg(rd, cpu_dst);
4160 break;
4161 case 0x006: /* VIS I edge16lcc */
4162 CHECK_FPU_FEATURE(dc, VIS1);
4163 gen_movl_reg_TN(rs1, cpu_src1);
4164 gen_movl_reg_TN(rs2, cpu_src2);
4165 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4166 gen_movl_TN_reg(rd, cpu_dst);
4167 break;
4168 case 0x007: /* VIS II edge16ln */
4169 CHECK_FPU_FEATURE(dc, VIS2);
4170 gen_movl_reg_TN(rs1, cpu_src1);
4171 gen_movl_reg_TN(rs2, cpu_src2);
4172 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4173 gen_movl_TN_reg(rd, cpu_dst);
4174 break;
4175 case 0x008: /* VIS I edge32cc */
4176 CHECK_FPU_FEATURE(dc, VIS1);
4177 gen_movl_reg_TN(rs1, cpu_src1);
4178 gen_movl_reg_TN(rs2, cpu_src2);
4179 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4180 gen_movl_TN_reg(rd, cpu_dst);
4181 break;
4182 case 0x009: /* VIS II edge32n */
4183 CHECK_FPU_FEATURE(dc, VIS2);
4184 gen_movl_reg_TN(rs1, cpu_src1);
4185 gen_movl_reg_TN(rs2, cpu_src2);
4186 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4187 gen_movl_TN_reg(rd, cpu_dst);
4188 break;
4189 case 0x00a: /* VIS I edge32lcc */
4190 CHECK_FPU_FEATURE(dc, VIS1);
4191 gen_movl_reg_TN(rs1, cpu_src1);
4192 gen_movl_reg_TN(rs2, cpu_src2);
4193 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4194 gen_movl_TN_reg(rd, cpu_dst);
4195 break;
4196 case 0x00b: /* VIS II edge32ln */
4197 CHECK_FPU_FEATURE(dc, VIS2);
4198 gen_movl_reg_TN(rs1, cpu_src1);
4199 gen_movl_reg_TN(rs2, cpu_src2);
4200 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4201 gen_movl_TN_reg(rd, cpu_dst);
4202 break;
4203 case 0x010: /* VIS I array8 */
4204 CHECK_FPU_FEATURE(dc, VIS1);
4205 cpu_src1 = get_src1(insn, cpu_src1);
4206 gen_movl_reg_TN(rs2, cpu_src2);
4207 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4208 gen_movl_TN_reg(rd, cpu_dst);
4209 break;
4210 case 0x012: /* VIS I array16 */
4211 CHECK_FPU_FEATURE(dc, VIS1);
4212 cpu_src1 = get_src1(insn, cpu_src1);
4213 gen_movl_reg_TN(rs2, cpu_src2);
4214 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4215 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4216 gen_movl_TN_reg(rd, cpu_dst);
4217 break;
4218 case 0x014: /* VIS I array32 */
4219 CHECK_FPU_FEATURE(dc, VIS1);
4220 cpu_src1 = get_src1(insn, cpu_src1);
4221 gen_movl_reg_TN(rs2, cpu_src2);
4222 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4223 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4224 gen_movl_TN_reg(rd, cpu_dst);
4225 break;
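/* array16 and array32 reuse the array8 helper and only rescale:
       array16 index == array8 index << 1   (2-byte elements)
       array32 index == array8 index << 2   (4-byte elements) */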
4226 case 0x018: /* VIS I alignaddr */
4227 CHECK_FPU_FEATURE(dc, VIS1);
4228 cpu_src1 = get_src1(insn, cpu_src1);
4229 gen_movl_reg_TN(rs2, cpu_src2);
4230 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4231 gen_movl_TN_reg(rd, cpu_dst);
4232 break;
4233 case 0x01a: /* VIS I alignaddrl */
4234 CHECK_FPU_FEATURE(dc, VIS1);
4235 cpu_src1 = get_src1(insn, cpu_src1);
4236 gen_movl_reg_TN(rs2, cpu_src2);
4237 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4238 gen_movl_TN_reg(rd, cpu_dst);
4239 break;
4240 case 0x019: /* VIS II bmask */
4241 CHECK_FPU_FEATURE(dc, VIS2);
4242 cpu_src1 = get_src1(insn, cpu_src1);
4243 cpu_src2 = get_src2(insn, cpu_src2); /* src2 comes from rs2, not rs1 */
4244 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4245 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4246 gen_movl_TN_reg(rd, cpu_dst);
4247 break;
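/* bmask returns rs1 + rs2 in rd and, via the deposit above, latches the
   sum into GSR bits 63:32 (the mask field) for a later bshuffle. */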
4248 case 0x020: /* VIS I fcmple16 */
4249 CHECK_FPU_FEATURE(dc, VIS1);
4250 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4251 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4252 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4253 gen_movl_TN_reg(rd, cpu_dst);
4254 break;
4255 case 0x022: /* VIS I fcmpne16 */
4256 CHECK_FPU_FEATURE(dc, VIS1);
4257 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4258 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4259 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4260 gen_movl_TN_reg(rd, cpu_dst);
4261 break;
4262 case 0x024: /* VIS I fcmple32 */
4263 CHECK_FPU_FEATURE(dc, VIS1);
4264 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4265 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4266 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4267 gen_movl_TN_reg(rd, cpu_dst);
4268 break;
4269 case 0x026: /* VIS I fcmpne32 */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4272 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4273 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4274 gen_movl_TN_reg(rd, cpu_dst);
4275 break;
4276 case 0x028: /* VIS I fcmpgt16 */
4277 CHECK_FPU_FEATURE(dc, VIS1);
4278 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4279 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4280 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4281 gen_movl_TN_reg(rd, cpu_dst);
4282 break;
4283 case 0x02a: /* VIS I fcmpeq16 */
4284 CHECK_FPU_FEATURE(dc, VIS1);
4285 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4286 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4287 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4288 gen_movl_TN_reg(rd, cpu_dst);
4289 break;
4290 case 0x02c: /* VIS I fcmpgt32 */
4291 CHECK_FPU_FEATURE(dc, VIS1);
4292 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4293 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4294 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4295 gen_movl_TN_reg(rd, cpu_dst);
4296 break;
4297 case 0x02e: /* VIS I fcmpeq32 */
4298 CHECK_FPU_FEATURE(dc, VIS1);
4299 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4300 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4301 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4302 gen_movl_TN_reg(rd, cpu_dst);
4303 break;
4304 case 0x031: /* VIS I fmul8x16 */
4305 CHECK_FPU_FEATURE(dc, VIS1);
4306 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4307 break;
4308 case 0x033: /* VIS I fmul8x16au */
4309 CHECK_FPU_FEATURE(dc, VIS1);
4310 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4311 break;
4312 case 0x035: /* VIS I fmul8x16al */
4313 CHECK_FPU_FEATURE(dc, VIS1);
4314 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4315 break;
4316 case 0x036: /* VIS I fmul8sux16 */
4317 CHECK_FPU_FEATURE(dc, VIS1);
4318 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4319 break;
4320 case 0x037: /* VIS I fmul8ulx16 */
4321 CHECK_FPU_FEATURE(dc, VIS1);
4322 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4323 break;
4324 case 0x038: /* VIS I fmuld8sux16 */
4325 CHECK_FPU_FEATURE(dc, VIS1);
4326 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4327 break;
4328 case 0x039: /* VIS I fmuld8ulx16 */
4329 CHECK_FPU_FEATURE(dc, VIS1);
4330 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4331 break;
4332 case 0x03a: /* VIS I fpack32 */
4333 CHECK_FPU_FEATURE(dc, VIS1);
4334 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4335 break;
4336 case 0x03b: /* VIS I fpack16 */
4337 CHECK_FPU_FEATURE(dc, VIS1);
4338 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4339 cpu_dst_32 = gen_dest_fpr_F();
4340 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4341 gen_store_fpr_F(dc, rd, cpu_dst_32);
4342 break;
4343 case 0x03d: /* VIS I fpackfix */
4344 CHECK_FPU_FEATURE(dc, VIS1);
4345 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4346 cpu_dst_32 = gen_dest_fpr_F();
4347 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4348 gen_store_fpr_F(dc, rd, cpu_dst_32);
4349 break;
4350 case 0x03e: /* VIS I pdist */
4351 CHECK_FPU_FEATURE(dc, VIS1);
4352 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4353 break;
4354 case 0x048: /* VIS I faligndata */
4355 CHECK_FPU_FEATURE(dc, VIS1);
4356 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4357 break;
4358 case 0x04b: /* VIS I fpmerge */
4359 CHECK_FPU_FEATURE(dc, VIS1);
4360 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4361 break;
4362 case 0x04c: /* VIS II bshuffle */
4363 CHECK_FPU_FEATURE(dc, VIS2);
4364 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4365 break;
4366 case 0x04d: /* VIS I fexpand */
4367 CHECK_FPU_FEATURE(dc, VIS1);
4368 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4369 break;
4370 case 0x050: /* VIS I fpadd16 */
4371 CHECK_FPU_FEATURE(dc, VIS1);
4372 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4373 break;
4374 case 0x051: /* VIS I fpadd16s */
4375 CHECK_FPU_FEATURE(dc, VIS1);
4376 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4377 break;
4378 case 0x052: /* VIS I fpadd32 */
4379 CHECK_FPU_FEATURE(dc, VIS1);
4380 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4381 break;
4382 case 0x053: /* VIS I fpadd32s */
4383 CHECK_FPU_FEATURE(dc, VIS1);
4384 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4385 break;
4386 case 0x054: /* VIS I fpsub16 */
4387 CHECK_FPU_FEATURE(dc, VIS1);
4388 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4389 break;
4390 case 0x055: /* VIS I fpsub16s */
4391 CHECK_FPU_FEATURE(dc, VIS1);
4392 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4393 break;
4394 case 0x056: /* VIS I fpsub32 */
4395 CHECK_FPU_FEATURE(dc, VIS1);
4396 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4397 break;
4398 case 0x057: /* VIS I fpsub32s */
4399 CHECK_FPU_FEATURE(dc, VIS1);
4400 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4401 break;
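/* Opcodes 0x060-0x07f are the VIS logical operations; they need no
   helpers and map directly onto TCG ops over the 64-bit (D) or
   32-bit single ("s"-suffixed, F) FP registers. */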
4402 case 0x060: /* VIS I fzero */
4403 CHECK_FPU_FEATURE(dc, VIS1);
4404 cpu_dst_64 = gen_dest_fpr_D();
4405 tcg_gen_movi_i64(cpu_dst_64, 0);
4406 gen_store_fpr_D(dc, rd, cpu_dst_64);
4407 break;
4408 case 0x061: /* VIS I fzeros */
4409 CHECK_FPU_FEATURE(dc, VIS1);
4410 cpu_dst_32 = gen_dest_fpr_F();
4411 tcg_gen_movi_i32(cpu_dst_32, 0);
4412 gen_store_fpr_F(dc, rd, cpu_dst_32);
4413 break;
4414 case 0x062: /* VIS I fnor */
4415 CHECK_FPU_FEATURE(dc, VIS1);
4416 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4417 break;
4418 case 0x063: /* VIS I fnors */
4419 CHECK_FPU_FEATURE(dc, VIS1);
4420 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4421 break;
4422 case 0x064: /* VIS I fandnot2 */
4423 CHECK_FPU_FEATURE(dc, VIS1);
4424 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4425 break;
4426 case 0x065: /* VIS I fandnot2s */
4427 CHECK_FPU_FEATURE(dc, VIS1);
4428 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4429 break;
4430 case 0x066: /* VIS I fnot2 */
4431 CHECK_FPU_FEATURE(dc, VIS1);
4432 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4433 break;
4434 case 0x067: /* VIS I fnot2s */
4435 CHECK_FPU_FEATURE(dc, VIS1);
4436 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4437 break;
4438 case 0x068: /* VIS I fandnot1 */
4439 CHECK_FPU_FEATURE(dc, VIS1);
4440 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4441 break;
4442 case 0x069: /* VIS I fandnot1s */
4443 CHECK_FPU_FEATURE(dc, VIS1);
4444 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4445 break;
4446 case 0x06a: /* VIS I fnot1 */
4447 CHECK_FPU_FEATURE(dc, VIS1);
4448 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4449 break;
4450 case 0x06b: /* VIS I fnot1s */
4451 CHECK_FPU_FEATURE(dc, VIS1);
4452 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4453 break;
4454 case 0x06c: /* VIS I fxor */
4455 CHECK_FPU_FEATURE(dc, VIS1);
4456 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4457 break;
4458 case 0x06d: /* VIS I fxors */
4459 CHECK_FPU_FEATURE(dc, VIS1);
4460 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4461 break;
4462 case 0x06e: /* VIS I fnand */
4463 CHECK_FPU_FEATURE(dc, VIS1);
4464 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4465 break;
4466 case 0x06f: /* VIS I fnands */
4467 CHECK_FPU_FEATURE(dc, VIS1);
4468 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4469 break;
4470 case 0x070: /* VIS I fand */
4471 CHECK_FPU_FEATURE(dc, VIS1);
4472 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4473 break;
4474 case 0x071: /* VIS I fands */
4475 CHECK_FPU_FEATURE(dc, VIS1);
4476 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4477 break;
4478 case 0x072: /* VIS I fxnor */
4479 CHECK_FPU_FEATURE(dc, VIS1);
4480 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4481 break;
4482 case 0x073: /* VIS I fxnors */
4483 CHECK_FPU_FEATURE(dc, VIS1);
4484 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4485 break;
4486 case 0x074: /* VIS I fsrc1 */
4487 CHECK_FPU_FEATURE(dc, VIS1);
4488 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4489 gen_store_fpr_D(dc, rd, cpu_src1_64);
4490 break;
4491 case 0x075: /* VIS I fsrc1s */
4492 CHECK_FPU_FEATURE(dc, VIS1);
4493 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4494 gen_store_fpr_F(dc, rd, cpu_src1_32);
4495 break;
4496 case 0x076: /* VIS I fornot2 */
4497 CHECK_FPU_FEATURE(dc, VIS1);
4498 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4499 break;
4500 case 0x077: /* VIS I fornot2s */
4501 CHECK_FPU_FEATURE(dc, VIS1);
4502 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4503 break;
4504 case 0x078: /* VIS I fsrc2 */
4505 CHECK_FPU_FEATURE(dc, VIS1);
4506 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4507 gen_store_fpr_D(dc, rd, cpu_src1_64);
4508 break;
4509 case 0x079: /* VIS I fsrc2s */
4510 CHECK_FPU_FEATURE(dc, VIS1);
4511 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4512 gen_store_fpr_F(dc, rd, cpu_src1_32);
4513 break;
4514 case 0x07a: /* VIS I fornot1 */
4515 CHECK_FPU_FEATURE(dc, VIS1);
4516 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4517 break;
4518 case 0x07b: /* VIS I fornot1s */
4519 CHECK_FPU_FEATURE(dc, VIS1);
4520 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4521 break;
4522 case 0x07c: /* VIS I for */
4523 CHECK_FPU_FEATURE(dc, VIS1);
4524 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4525 break;
4526 case 0x07d: /* VIS I fors */
4527 CHECK_FPU_FEATURE(dc, VIS1);
4528 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4529 break;
4530 case 0x07e: /* VIS I fone */
4531 CHECK_FPU_FEATURE(dc, VIS1);
4532 cpu_dst_64 = gen_dest_fpr_D();
4533 tcg_gen_movi_i64(cpu_dst_64, -1);
4534 gen_store_fpr_D(dc, rd, cpu_dst_64);
4535 break;
4536 case 0x07f: /* VIS I fones */
4537 CHECK_FPU_FEATURE(dc, VIS1);
4538 cpu_dst_32 = gen_dest_fpr_F();
4539 tcg_gen_movi_i32(cpu_dst_32, -1);
4540 gen_store_fpr_F(dc, rd, cpu_dst_32);
4541 break;
4542 case 0x080: /* VIS I shutdown */
4543 case 0x081: /* VIS II siam */
4544 // XXX: not implemented, trap as illegal for now
4545 goto illegal_insn;
4546 default:
4547 goto illegal_insn;
4548 }
4549 #else
4550 goto ncp_insn;
4551 #endif
4552 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4553 #ifdef TARGET_SPARC64
4554 goto illegal_insn;
4555 #else
4556 goto ncp_insn;
4557 #endif
4558 #ifdef TARGET_SPARC64
4559 } else if (xop == 0x39) { /* V9 return */
4560 TCGv_i32 r_const;
4562 save_state(dc, cpu_cond);
4563 cpu_src1 = get_src1(insn, cpu_src1);
4564 if (IS_IMM) { /* immediate */
4565 simm = GET_FIELDs(insn, 19, 31);
4566 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4567 } else { /* register */
4568 rs2 = GET_FIELD(insn, 27, 31);
4569 if (rs2) {
4570 gen_movl_reg_TN(rs2, cpu_src2);
4571 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4572 } else
4573 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4574 }
4575 gen_helper_restore(cpu_env);
4576 gen_mov_pc_npc(dc, cpu_cond);
4577 r_const = tcg_const_i32(3);
4578 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4579 tcg_temp_free_i32(r_const);
4580 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4581 dc->npc = DYNAMIC_PC;
4582 goto jmp_insn;
4583 #endif
4584 } else {
4585 cpu_src1 = get_src1(insn, cpu_src1);
4586 if (IS_IMM) { /* immediate */
4587 simm = GET_FIELDs(insn, 19, 31);
4588 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4589 } else { /* register */
4590 rs2 = GET_FIELD(insn, 27, 31);
4591 if (rs2) {
4592 gen_movl_reg_TN(rs2, cpu_src2);
4593 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4594 } else
4595 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4596 }
4597 switch (xop) {
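/* Note: jmpl writes the address of the jmpl instruction itself to rd
   (with rd = %o7 it acts as an indirect call; "jmpl %i7 + 8, %g0" is
   the ret idiom), then transfers control to the computed target
   through npc, preserving SPARC delay-slot ordering. */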
4598 case 0x38: /* jmpl */
4599 {
4600 TCGv r_pc;
4601 TCGv_i32 r_const;
4603 r_pc = tcg_const_tl(dc->pc);
4604 gen_movl_TN_reg(rd, r_pc);
4605 tcg_temp_free(r_pc);
4606 gen_mov_pc_npc(dc, cpu_cond);
4607 r_const = tcg_const_i32(3);
4608 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4609 tcg_temp_free_i32(r_const);
4610 gen_address_mask(dc, cpu_dst);
4611 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4612 dc->npc = DYNAMIC_PC;
4613 }
4614 goto jmp_insn;
4615 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4616 case 0x39: /* rett, V9 return */
4617 {
4618 TCGv_i32 r_const;
4620 if (!supervisor(dc))
4621 goto priv_insn;
4622 gen_mov_pc_npc(dc, cpu_cond);
4623 r_const = tcg_const_i32(3);
4624 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4625 tcg_temp_free_i32(r_const);
4626 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4627 dc->npc = DYNAMIC_PC;
4628 gen_helper_rett(cpu_env);
4629 }
4630 goto jmp_insn;
4631 #endif
4632 case 0x3b: /* flush */
4633 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4634 goto unimp_flush;
4635 /* nop */
4636 break;
4637 case 0x3c: /* save */
4638 save_state(dc, cpu_cond);
4639 gen_helper_save(cpu_env);
4640 gen_movl_TN_reg(rd, cpu_dst);
4641 break;
4642 case 0x3d: /* restore */
4643 save_state(dc, cpu_cond);
4644 gen_helper_restore(cpu_env);
4645 gen_movl_TN_reg(rd, cpu_dst);
4646 break;
4647 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4648 case 0x3e: /* V9 done/retry */
4649 {
4650 switch (rd) {
4651 case 0:
4652 if (!supervisor(dc))
4653 goto priv_insn;
4654 dc->npc = DYNAMIC_PC;
4655 dc->pc = DYNAMIC_PC;
4656 gen_helper_done(cpu_env);
4657 goto jmp_insn;
4658 case 1:
4659 if (!supervisor(dc))
4660 goto priv_insn;
4661 dc->npc = DYNAMIC_PC;
4662 dc->pc = DYNAMIC_PC;
4663 gen_helper_retry(cpu_env);
4664 goto jmp_insn;
4665 default:
4666 goto illegal_insn;
4667 }
4668 }
4669 break;
4670 #endif
4671 default:
4672 goto illegal_insn;
4673 }
4674 }
4675 break;
4676 }
4677 break;
4678 case 3: /* load/store instructions */
4679 {
4680 unsigned int xop = GET_FIELD(insn, 7, 12);
4682 /* flush pending conditional evaluations before exposing
4683 cpu state */
4684 if (dc->cc_op != CC_OP_FLAGS) {
4685 dc->cc_op = CC_OP_FLAGS;
4686 gen_helper_compute_psr(cpu_env);
4687 }
4688 cpu_src1 = get_src1(insn, cpu_src1);
4689 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4690 rs2 = GET_FIELD(insn, 27, 31);
4691 gen_movl_reg_TN(rs2, cpu_src2);
4692 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4693 } else if (IS_IMM) { /* immediate */
4694 simm = GET_FIELDs(insn, 19, 31);
4695 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4696 } else { /* register */
4697 rs2 = GET_FIELD(insn, 27, 31);
4698 if (rs2 != 0) {
4699 gen_movl_reg_TN(rs2, cpu_src2);
4700 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4701 } else
4702 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4703 }
4704 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4705 (xop > 0x17 && xop <= 0x1d ) ||
4706 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
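/* Integer loads: the loaded value lands in cpu_val and is written
   back to rd by gen_movl_TN_reg below; variants that update the
   register file themselves (ldda, FP and prefetch ASI forms) jump
   to skip_move instead. */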
4707 switch (xop) {
4708 case 0x0: /* ld, V9 lduw, load unsigned word */
4709 gen_address_mask(dc, cpu_addr);
4710 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4711 break;
4712 case 0x1: /* ldub, load unsigned byte */
4713 gen_address_mask(dc, cpu_addr);
4714 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4715 break;
4716 case 0x2: /* lduh, load unsigned halfword */
4717 gen_address_mask(dc, cpu_addr);
4718 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4719 break;
4720 case 0x3: /* ldd, load double word */
4721 if (rd & 1)
4722 goto illegal_insn;
4723 else {
4724 TCGv_i32 r_const;
4726 save_state(dc, cpu_cond);
4727 r_const = tcg_const_i32(7);
4728 /* XXX remove alignment check */
4729 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4730 tcg_temp_free_i32(r_const);
4731 gen_address_mask(dc, cpu_addr);
4732 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4733 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4734 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4735 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4736 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4737 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4738 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4739 }
4740 break;
4741 case 0x9: /* ldsb, load signed byte */
4742 gen_address_mask(dc, cpu_addr);
4743 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4744 break;
4745 case 0xa: /* ldsh, load signed halfword */
4746 gen_address_mask(dc, cpu_addr);
4747 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4748 break;
4749 case 0xd: /* ldstub -- XXX: should be atomic */
4750 {
4751 TCGv r_const;
4753 gen_address_mask(dc, cpu_addr);
4754 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4755 r_const = tcg_const_tl(0xff);
4756 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4757 tcg_temp_free(r_const);
4758 }
4759 break;
4760 case 0x0f: /* swap, swap register with memory -- XXX:
4761 should be atomic */
4762 CHECK_IU_FEATURE(dc, SWAP);
4763 gen_movl_reg_TN(rd, cpu_val);
4764 gen_address_mask(dc, cpu_addr);
4765 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4766 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4767 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4768 break;
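/* Alternate-space accesses: pre-V9 these have no immediate form and
   are privileged, hence the IS_IMM/supervisor checks below; on
   SPARC64 the immediate form takes the ASI from the %asi register
   instead. */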
4769 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4770 case 0x10: /* lda, V9 lduwa, load word alternate */
4771 #ifndef TARGET_SPARC64
4772 if (IS_IMM)
4773 goto illegal_insn;
4774 if (!supervisor(dc))
4775 goto priv_insn;
4776 #endif
4777 save_state(dc, cpu_cond);
4778 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4779 break;
4780 case 0x11: /* lduba, load unsigned byte alternate */
4781 #ifndef TARGET_SPARC64
4782 if (IS_IMM)
4783 goto illegal_insn;
4784 if (!supervisor(dc))
4785 goto priv_insn;
4786 #endif
4787 save_state(dc, cpu_cond);
4788 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4789 break;
4790 case 0x12: /* lduha, load unsigned halfword alternate */
4791 #ifndef TARGET_SPARC64
4792 if (IS_IMM)
4793 goto illegal_insn;
4794 if (!supervisor(dc))
4795 goto priv_insn;
4796 #endif
4797 save_state(dc, cpu_cond);
4798 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4799 break;
4800 case 0x13: /* ldda, load double word alternate */
4801 #ifndef TARGET_SPARC64
4802 if (IS_IMM)
4803 goto illegal_insn;
4804 if (!supervisor(dc))
4805 goto priv_insn;
4806 #endif
4807 if (rd & 1)
4808 goto illegal_insn;
4809 save_state(dc, cpu_cond);
4810 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4811 goto skip_move;
4812 case 0x19: /* ldsba, load signed byte alternate */
4813 #ifndef TARGET_SPARC64
4814 if (IS_IMM)
4815 goto illegal_insn;
4816 if (!supervisor(dc))
4817 goto priv_insn;
4818 #endif
4819 save_state(dc, cpu_cond);
4820 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4821 break;
4822 case 0x1a: /* ldsha, load signed halfword alternate */
4823 #ifndef TARGET_SPARC64
4824 if (IS_IMM)
4825 goto illegal_insn;
4826 if (!supervisor(dc))
4827 goto priv_insn;
4828 #endif
4829 save_state(dc, cpu_cond);
4830 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4831 break;
4832 case 0x1d: /* ldstuba -- XXX: should be atomic */
4833 #ifndef TARGET_SPARC64
4834 if (IS_IMM)
4835 goto illegal_insn;
4836 if (!supervisor(dc))
4837 goto priv_insn;
4838 #endif
4839 save_state(dc, cpu_cond);
4840 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4841 break;
4842 case 0x1f: /* swapa, swap reg with alt. memory -- XXX:
4843 should be atomic */
4844 CHECK_IU_FEATURE(dc, SWAP);
4845 #ifndef TARGET_SPARC64
4846 if (IS_IMM)
4847 goto illegal_insn;
4848 if (!supervisor(dc))
4849 goto priv_insn;
4850 #endif
4851 save_state(dc, cpu_cond);
4852 gen_movl_reg_TN(rd, cpu_val);
4853 gen_swap_asi(cpu_val, cpu_addr, insn);
4854 break;
4856 #ifndef TARGET_SPARC64
4857 case 0x30: /* ldc */
4858 case 0x31: /* ldcsr */
4859 case 0x33: /* lddc */
4860 goto ncp_insn;
4861 #endif
4862 #endif
4863 #ifdef TARGET_SPARC64
4864 case 0x08: /* V9 ldsw */
4865 gen_address_mask(dc, cpu_addr);
4866 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4867 break;
4868 case 0x0b: /* V9 ldx */
4869 gen_address_mask(dc, cpu_addr);
4870 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4871 break;
4872 case 0x18: /* V9 ldswa */
4873 save_state(dc, cpu_cond);
4874 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4875 break;
4876 case 0x1b: /* V9 ldxa */
4877 save_state(dc, cpu_cond);
4878 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4879 break;
4880 case 0x2d: /* V9 prefetch, no effect */
4881 goto skip_move;
4882 case 0x30: /* V9 ldfa */
4883 if (gen_trap_ifnofpu(dc)) {
4884 goto jmp_insn;
4885 }
4886 save_state(dc, cpu_cond);
4887 gen_ldf_asi(cpu_addr, insn, 4, rd);
4888 gen_update_fprs_dirty(rd);
4889 goto skip_move;
4890 case 0x33: /* V9 lddfa */
4891 if (gen_trap_ifnofpu(dc)) {
4892 goto jmp_insn;
4893 }
4894 save_state(dc, cpu_cond);
4895 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4896 gen_update_fprs_dirty(DFPREG(rd));
4897 goto skip_move;
4898 case 0x3d: /* V9 prefetcha, no effect */
4899 goto skip_move;
4900 case 0x32: /* V9 ldqfa */
4901 CHECK_FPU_FEATURE(dc, FLOAT128);
4902 if (gen_trap_ifnofpu(dc)) {
4903 goto jmp_insn;
4904 }
4905 save_state(dc, cpu_cond);
4906 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4907 gen_update_fprs_dirty(QFPREG(rd));
4908 goto skip_move;
4909 #endif
4910 default:
4911 goto illegal_insn;
4912 }
4913 gen_movl_TN_reg(rd, cpu_val);
4914 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4915 skip_move: ;
4916 #endif
4917 } else if (xop >= 0x20 && xop < 0x24) {
4918 if (gen_trap_ifnofpu(dc)) {
4919 goto jmp_insn;
4920 }
4921 save_state(dc, cpu_cond);
4922 switch (xop) {
4923 case 0x20: /* ldf, load fpreg */
4924 gen_address_mask(dc, cpu_addr);
4925 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4926 cpu_dst_32 = gen_dest_fpr_F();
4927 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4928 gen_store_fpr_F(dc, rd, cpu_dst_32);
4929 break;
4930 case 0x21: /* ldfsr, V9 ldxfsr */
4931 #ifdef TARGET_SPARC64
4932 gen_address_mask(dc, cpu_addr);
4933 if (rd == 1) {
4934 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4935 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4936 } else {
4937 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4938 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4939 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4940 }
4941 #else
4942 {
4943 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4944 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4945 }
4946 #endif
4947 break;
4948 case 0x22: /* ldqf, load quad fpreg */
4949 {
4950 TCGv_i32 r_const;
4952 CHECK_FPU_FEATURE(dc, FLOAT128);
4953 r_const = tcg_const_i32(dc->mem_idx);
4954 gen_address_mask(dc, cpu_addr);
4955 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4956 tcg_temp_free_i32(r_const);
4957 gen_op_store_QT0_fpr(QFPREG(rd));
4958 gen_update_fprs_dirty(QFPREG(rd));
4959 }
4960 break;
4961 case 0x23: /* lddf, load double fpreg */
4962 gen_address_mask(dc, cpu_addr);
4963 cpu_dst_64 = gen_dest_fpr_D();
4964 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4965 gen_store_fpr_D(dc, rd, cpu_dst_64);
4966 break;
4967 default:
4968 goto illegal_insn;
4969 }
4970 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4971 xop == 0xe || xop == 0x1e) {
4972 gen_movl_reg_TN(rd, cpu_val);
4973 switch (xop) {
4974 case 0x4: /* st, store word */
4975 gen_address_mask(dc, cpu_addr);
4976 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4977 break;
4978 case 0x5: /* stb, store byte */
4979 gen_address_mask(dc, cpu_addr);
4980 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4981 break;
4982 case 0x6: /* sth, store halfword */
4983 gen_address_mask(dc, cpu_addr);
4984 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4985 break;
4986 case 0x7: /* std, store double word */
4987 if (rd & 1)
4988 goto illegal_insn;
4989 else {
4990 TCGv_i32 r_const;
4992 save_state(dc, cpu_cond);
4993 gen_address_mask(dc, cpu_addr);
4994 r_const = tcg_const_i32(7);
4995 /* XXX remove alignment check */
4996 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4997 tcg_temp_free_i32(r_const);
4998 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4999 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
5000 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5001 }
5002 break;
5003 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5004 case 0x14: /* sta, V9 stwa, store word alternate */
5005 #ifndef TARGET_SPARC64
5006 if (IS_IMM)
5007 goto illegal_insn;
5008 if (!supervisor(dc))
5009 goto priv_insn;
5010 #endif
5011 save_state(dc, cpu_cond);
5012 gen_st_asi(cpu_val, cpu_addr, insn, 4);
5013 dc->npc = DYNAMIC_PC;
5014 break;
5015 case 0x15: /* stba, store byte alternate */
5016 #ifndef TARGET_SPARC64
5017 if (IS_IMM)
5018 goto illegal_insn;
5019 if (!supervisor(dc))
5020 goto priv_insn;
5021 #endif
5022 save_state(dc, cpu_cond);
5023 gen_st_asi(cpu_val, cpu_addr, insn, 1);
5024 dc->npc = DYNAMIC_PC;
5025 break;
5026 case 0x16: /* stha, store halfword alternate */
5027 #ifndef TARGET_SPARC64
5028 if (IS_IMM)
5029 goto illegal_insn;
5030 if (!supervisor(dc))
5031 goto priv_insn;
5032 #endif
5033 save_state(dc, cpu_cond);
5034 gen_st_asi(cpu_val, cpu_addr, insn, 2);
5035 dc->npc = DYNAMIC_PC;
5036 break;
5037 case 0x17: /* stda, store double word alternate */
5038 #ifndef TARGET_SPARC64
5039 if (IS_IMM)
5040 goto illegal_insn;
5041 if (!supervisor(dc))
5042 goto priv_insn;
5043 #endif
5044 if (rd & 1)
5045 goto illegal_insn;
5046 else {
5047 save_state(dc, cpu_cond);
5048 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
5049 }
5050 break;
5051 #endif
5052 #ifdef TARGET_SPARC64
5053 case 0x0e: /* V9 stx */
5054 gen_address_mask(dc, cpu_addr);
5055 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5056 break;
5057 case 0x1e: /* V9 stxa */
5058 save_state(dc, cpu_cond);
5059 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5060 dc->npc = DYNAMIC_PC;
5061 break;
5062 #endif
5063 default:
5064 goto illegal_insn;
5065 }
5066 } else if (xop > 0x23 && xop < 0x28) {
5067 if (gen_trap_ifnofpu(dc)) {
5068 goto jmp_insn;
5069 }
5070 save_state(dc, cpu_cond);
5071 switch (xop) {
5072 case 0x24: /* stf, store fpreg */
5073 gen_address_mask(dc, cpu_addr);
5074 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5075 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5076 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5077 break;
5078 case 0x25: /* stfsr, V9 stxfsr */
5079 #ifdef TARGET_SPARC64
5080 gen_address_mask(dc, cpu_addr);
5081 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5082 if (rd == 1)
5083 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5084 else
5085 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5086 #else
5087 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5088 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5089 #endif
5090 break;
5091 case 0x26:
5092 #ifdef TARGET_SPARC64
5093 /* V9 stqf, store quad fpreg */
5094 {
5095 TCGv_i32 r_const;
5097 CHECK_FPU_FEATURE(dc, FLOAT128);
5098 gen_op_load_fpr_QT0(QFPREG(rd));
5099 r_const = tcg_const_i32(dc->mem_idx);
5100 gen_address_mask(dc, cpu_addr);
5101 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5102 tcg_temp_free_i32(r_const);
5103 }
5104 break;
5105 #else /* !TARGET_SPARC64 */
5106 /* stdfq, store floating point queue */
5107 #if defined(CONFIG_USER_ONLY)
5108 goto illegal_insn;
5109 #else
5110 if (!supervisor(dc))
5111 goto priv_insn;
5112 if (gen_trap_ifnofpu(dc)) {
5113 goto jmp_insn;
5114 }
5115 goto nfq_insn;
5116 #endif
5117 #endif
5118 case 0x27: /* stdf, store double fpreg */
5119 gen_address_mask(dc, cpu_addr);
5120 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5121 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5122 break;
5123 default:
5124 goto illegal_insn;
5125 }
5126 } else if (xop > 0x33 && xop < 0x3f) {
5127 save_state(dc, cpu_cond);
5128 switch (xop) {
5129 #ifdef TARGET_SPARC64
5130 case 0x34: /* V9 stfa */
5131 if (gen_trap_ifnofpu(dc)) {
5132 goto jmp_insn;
5133 }
5134 gen_stf_asi(cpu_addr, insn, 4, rd);
5135 break;
5136 case 0x36: /* V9 stqfa */
5137 {
5138 TCGv_i32 r_const;
5140 CHECK_FPU_FEATURE(dc, FLOAT128);
5141 if (gen_trap_ifnofpu(dc)) {
5142 goto jmp_insn;
5143 }
5144 r_const = tcg_const_i32(7);
5145 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5146 tcg_temp_free_i32(r_const);
5147 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5148 }
5149 break;
5150 case 0x37: /* V9 stdfa */
5151 if (gen_trap_ifnofpu(dc)) {
5152 goto jmp_insn;
5154 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5155 break;
5156 case 0x3c: /* V9 casa */
5157 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5158 gen_movl_TN_reg(rd, cpu_val);
5159 break;
5160 case 0x3e: /* V9 casxa */
5161 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5162 gen_movl_TN_reg(rd, cpu_val);
5163 break;
5164 #else
5165 case 0x34: /* stc */
5166 case 0x35: /* stcsr */
5167 case 0x36: /* stdcq */
5168 case 0x37: /* stdc */
5169 goto ncp_insn;
5170 #endif
5171 default:
5172 goto illegal_insn;
5173 }
5174 } else
5175 goto illegal_insn;
5176 }
5177 break;
5178 }
5179 /* default case for non-jump instructions */
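/* SPARC control transfer is delayed, so the translator tracks a
   pc/npc pair: normally npc advances to pc + 4, DYNAMIC_PC means the
   next PC is only known at run time, and JUMP_PC means npc is one of
   two static targets selected by a condition evaluated at run time. */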
5180 if (dc->npc == DYNAMIC_PC) {
5181 dc->pc = DYNAMIC_PC;
5182 gen_op_next_insn();
5183 } else if (dc->npc == JUMP_PC) {
5184 /* we can do a static jump */
5185 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5186 dc->is_br = 1;
5187 } else {
5188 dc->pc = dc->npc;
5189 dc->npc = dc->npc + 4;
5190 }
5191 jmp_insn:
5192 goto egress;
5193 illegal_insn:
5194 {
5195 TCGv_i32 r_const;
5197 save_state(dc, cpu_cond);
5198 r_const = tcg_const_i32(TT_ILL_INSN);
5199 gen_helper_raise_exception(cpu_env, r_const);
5200 tcg_temp_free_i32(r_const);
5201 dc->is_br = 1;
5202 }
5203 goto egress;
5204 unimp_flush:
5205 {
5206 TCGv_i32 r_const;
5208 save_state(dc, cpu_cond);
5209 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5210 gen_helper_raise_exception(cpu_env, r_const);
5211 tcg_temp_free_i32(r_const);
5212 dc->is_br = 1;
5213 }
5214 goto egress;
5215 #if !defined(CONFIG_USER_ONLY)
5216 priv_insn:
5217 {
5218 TCGv_i32 r_const;
5220 save_state(dc, cpu_cond);
5221 r_const = tcg_const_i32(TT_PRIV_INSN);
5222 gen_helper_raise_exception(cpu_env, r_const);
5223 tcg_temp_free_i32(r_const);
5224 dc->is_br = 1;
5225 }
5226 goto egress;
5227 #endif
5228 nfpu_insn:
5229 save_state(dc, cpu_cond);
5230 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5231 dc->is_br = 1;
5232 goto egress;
5233 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5234 nfq_insn:
5235 save_state(dc, cpu_cond);
5236 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5237 dc->is_br = 1;
5238 goto egress;
5239 #endif
5240 #ifndef TARGET_SPARC64
5241 ncp_insn:
5242 {
5243 TCGv_i32 r_const;
5245 save_state(dc, cpu_cond);
5246 r_const = tcg_const_i32(TT_NCP_INSN);
5247 gen_helper_raise_exception(cpu_env, r_const);
5248 tcg_temp_free_i32(r_const);
5249 dc->is_br = 1;
5250 }
5251 goto egress;
5252 #endif
5253 egress:
5254 tcg_temp_free(cpu_tmp1);
5255 tcg_temp_free(cpu_tmp2);
5256 if (dc->n_t32 != 0) {
5257 int i;
5258 for (i = dc->n_t32 - 1; i >= 0; --i) {
5259 tcg_temp_free_i32(dc->t32[i]);
5260 }
5261 dc->n_t32 = 0;
5262 }
5263 }
5265 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5266 int spc, CPUSPARCState *env)
5267 {
5268 target_ulong pc_start, last_pc;
5269 uint16_t *gen_opc_end;
5270 DisasContext dc1, *dc = &dc1;
5271 CPUBreakpoint *bp;
5272 int j, lj = -1;
5273 int num_insns;
5274 int max_insns;
5275 unsigned int insn;
5277 memset(dc, 0, sizeof(DisasContext));
5278 dc->tb = tb;
5279 pc_start = tb->pc;
5280 dc->pc = pc_start;
5281 last_pc = dc->pc;
5282 dc->npc = (target_ulong) tb->cs_base;
5283 dc->cc_op = CC_OP_DYNAMIC;
5284 dc->mem_idx = cpu_mmu_index(env);
5285 dc->def = env->def;
5286 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5287 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5288 dc->singlestep = (env->singlestep_enabled || singlestep);
5289 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5291 cpu_tmp0 = tcg_temp_new();
5292 cpu_tmp32 = tcg_temp_new_i32();
5293 cpu_tmp64 = tcg_temp_new_i64();
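/* Note: cpu_dst/cpu_val/cpu_addr are allocated with
   tcg_temp_local_new() because local temps, unlike the plain
   temporaries above, keep their value across the internal branches
   and labels that a single translated instruction may generate. */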
5295 cpu_dst = tcg_temp_local_new();
5297 // loads and stores
5298 cpu_val = tcg_temp_local_new();
5299 cpu_addr = tcg_temp_local_new();
5301 num_insns = 0;
5302 max_insns = tb->cflags & CF_COUNT_MASK;
5303 if (max_insns == 0)
5304 max_insns = CF_COUNT_MASK;
5305 gen_icount_start();
5306 do {
5307 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5308 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5309 if (bp->pc == dc->pc) {
5310 if (dc->pc != pc_start)
5311 save_state(dc, cpu_cond);
5312 gen_helper_debug(cpu_env);
5313 tcg_gen_exit_tb(0);
5314 dc->is_br = 1;
5315 goto exit_gen_loop;
5316 }
5317 }
5318 }
5319 if (spc) {
5320 qemu_log("Search PC...\n");
5321 j = gen_opc_ptr - gen_opc_buf;
5322 if (lj < j) {
5323 lj++;
5324 while (lj < j)
5325 gen_opc_instr_start[lj++] = 0;
5326 gen_opc_pc[lj] = dc->pc;
5327 gen_opc_npc[lj] = dc->npc;
5328 gen_opc_instr_start[lj] = 1;
5329 gen_opc_icount[lj] = num_insns;
5330 }
5331 }
5332 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5333 gen_io_start();
5334 last_pc = dc->pc;
5335 insn = cpu_ldl_code(env, dc->pc);
5336 disas_sparc_insn(dc, insn);
5337 num_insns++;
5339 if (dc->is_br)
5340 break;
5341 /* if the next PC is different, we abort now */
5342 if (dc->pc != (last_pc + 4))
5343 break;
5344 /* if we reach a page boundary, we stop generation so that the
5345 PC of a TT_TFAULT exception is always in the right page */
5346 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5347 break;
5348 /* if single step mode, we generate only one instruction and
5349 generate an exception */
5350 if (dc->singlestep) {
5351 break;
5352 }
5353 } while ((gen_opc_ptr < gen_opc_end) &&
5354 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5355 num_insns < max_insns);
5357 exit_gen_loop:
5358 tcg_temp_free(cpu_addr);
5359 tcg_temp_free(cpu_val);
5360 tcg_temp_free(cpu_dst);
5361 tcg_temp_free_i64(cpu_tmp64);
5362 tcg_temp_free_i32(cpu_tmp32);
5363 tcg_temp_free(cpu_tmp0);
5365 if (tb->cflags & CF_LAST_IO)
5366 gen_io_end();
5367 if (!dc->is_br) {
5368 if (dc->pc != DYNAMIC_PC &&
5369 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5370 /* static PC and NPC: we can use direct chaining */
5371 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5372 } else {
5373 if (dc->pc != DYNAMIC_PC)
5374 tcg_gen_movi_tl(cpu_pc, dc->pc);
5375 save_npc(dc, cpu_cond);
5376 tcg_gen_exit_tb(0);
5377 }
5378 }
5379 gen_icount_end(tb, num_insns);
5380 *gen_opc_ptr = INDEX_op_end;
5381 if (spc) {
5382 j = gen_opc_ptr - gen_opc_buf;
5383 lj++;
5384 while (lj <= j)
5385 gen_opc_instr_start[lj++] = 0;
5386 #if 0
5387 log_page_dump();
5388 #endif
5389 gen_opc_jump_pc[0] = dc->jump_pc[0];
5390 gen_opc_jump_pc[1] = dc->jump_pc[1];
5391 } else {
5392 tb->size = last_pc + 4 - pc_start;
5393 tb->icount = num_insns;
5394 }
5395 #ifdef DEBUG_DISAS
5396 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5397 qemu_log("--------------\n");
5398 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5399 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5400 qemu_log("\n");
5401 }
5402 #endif
5403 }
5405 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5406 {
5407 gen_intermediate_code_internal(tb, 0, env);
5408 }
5410 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5411 {
5412 gen_intermediate_code_internal(tb, 1, env);
5413 }
5415 void gen_intermediate_code_init(CPUSPARCState *env)
5416 {
5417 unsigned int i;
5418 static int inited;
5419 static const char * const gregnames[8] = {
5420 NULL, // g0 not used
5421 "g1",
5422 "g2",
5423 "g3",
5424 "g4",
5425 "g5",
5426 "g6",
5427 "g7",
5429 static const char * const fregnames[32] = {
5430 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5431 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5432 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5433 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5436 /* init various static tables */
5437 if (!inited) {
5438 inited = 1;
5440 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5441 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5442 offsetof(CPUSPARCState, regwptr),
5443 "regwptr");
5444 #ifdef TARGET_SPARC64
5445 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5446 "xcc");
5447 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5448 "asi");
5449 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5450 "fprs");
5451 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5452 "gsr");
5453 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5454 offsetof(CPUSPARCState, tick_cmpr),
5455 "tick_cmpr");
5456 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5457 offsetof(CPUSPARCState, stick_cmpr),
5458 "stick_cmpr");
5459 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5460 offsetof(CPUSPARCState, hstick_cmpr),
5461 "hstick_cmpr");
5462 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5463 "hintp");
5464 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5465 "htba");
5466 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5467 "hver");
5468 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5469 offsetof(CPUSPARCState, ssr), "ssr");
5470 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5471 offsetof(CPUSPARCState, version), "ver");
5472 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5473 offsetof(CPUSPARCState, softint),
5474 "softint");
5475 #else
5476 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5477 "wim");
5478 #endif
5479 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5480 "cond");
5481 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5482 "cc_src");
5483 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5484 offsetof(CPUSPARCState, cc_src2),
5485 "cc_src2");
5486 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5487 "cc_dst");
5488 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5489 "cc_op");
5490 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5491 "psr");
5492 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5493 "fsr");
5494 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5495 "pc");
5496 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5497 "npc");
5498 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5499 #ifndef CONFIG_USER_ONLY
5500 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5501 "tbr");
5502 #endif
5503 for (i = 1; i < 8; i++) {
5504 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5505 offsetof(CPUSPARCState, gregs[i]),
5506 gregnames[i]);
5507 }
5508 for (i = 0; i < TARGET_DPREGS; i++) {
5509 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5510 offsetof(CPUSPARCState, fpr[i]),
5511 fregnames[i]);
5512 }
5514 /* register helpers */
5516 #define GEN_HELPER 2
5517 #include "helper.h"
5518 }
5519 }
5521 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5522 {
5523 target_ulong npc;
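/* Note: gen_opc_npc[] holds either a real next-PC or one of the
   markers DYNAMIC_PC (1) and JUMP_PC (2) tested below. */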
5524 env->pc = gen_opc_pc[pc_pos];
5525 npc = gen_opc_npc[pc_pos];
5526 if (npc == 1) {
5527 /* dynamic NPC: already stored */
5528 } else if (npc == 2) {
5529 /* jump PC: use 'cond' and the jump targets of the translation */
5530 if (env->cond) {
5531 env->npc = gen_opc_jump_pc[0];
5532 } else {
5533 env->npc = gen_opc_jump_pc[1];
5534 }
5535 } else {
5536 env->npc = npc;
5537 }
5539 /* flush pending conditional evaluations before exposing cpu state */
5540 if (CC_OP != CC_OP_FLAGS) {
5541 helper_compute_psr(env);
5542 }
5543 }