/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];
#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    int n_t32;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
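
/* Annotation (editor's note, not in the upstream source): GET_FIELD counts
   bit positions from the most-significant end of the 32-bit instruction
   word, so FROM == 0 names bit 31.  For example, GET_FIELD(insn, 0, 1)
   extracts insn[31:30], the SPARC "op" field, while the manual-order macro
   GET_FIELD_SP(insn, 13, 13) extracts bit 13, the "i" immediate selector
   tested by IS_IMM below. */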
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
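
/* Annotation (editor's note, not in the upstream source): sign_extend()
   widens a len-bit two's-complement field by shifting it to the top of the
   word and arithmetic-shifting it back down, e.g.
   sign_extend(0x1fff, 13) == -1; this is how GET_FIELDs turns the 13-bit
   simm13 field into a negative target_long. */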
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}

/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}

static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
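
/* Annotation (editor's note, not in the upstream source): %g0 always reads
   as zero and discards writes, hence the reg == 0 special cases above.
   The globals %g1-%g7 live in dedicated TCG globals (cpu_gregs), while
   the current window's %o/%l/%i registers are reached indirectly through
   cpu_regwptr, which the window save/restore code repoints. */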
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
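
/* Annotation (editor's note, not in the upstream source): the value
   (tcg_target_long)tb + tb_num returned via tcg_gen_exit_tb() tells the
   execution loop which of this TB's two direct-jump slots to patch, so
   consecutive TBs get chained; returning 0 skips chaining.  The same-page
   test is required because a chained jump bypasses the lookup that would
   notice a changed page mapping. */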
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
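
/* Annotation (editor's note, not in the upstream source): both helpers use
   the usual unsigned-overflow identity.  After dst = src1 + src2, a carry
   out occurred iff dst < src1 (unsigned); e.g. in 32 bits, 0xffffffff + 1
   wraps to 0, and 0 < 0xffffffff signals the carry.  The borrow case is
   simply src1 < src2 on the recorded subtraction operands. */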
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
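
/* Annotation (editor's note, not in the upstream source): MULScc is one
   step of the SPARC V8 iterative multiply.  As generated above, it gates
   src2 by the LSB of %y, shifts src1 right one bit with the previous
   N ^ V as the new sign bit, adds the two, and shifts the dropped bit of
   src1 into the top of %y; guest code repeats this 32 times to build a
   64-bit product across %y and the destination register. */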
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
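
/* Annotation (editor's note, not in the upstream source): this is the heart
   of the delayed-branch model.  While a conditional branch's outcome only
   exists in cpu_cond, dc->npc holds the symbolic JUMP_PC and the two
   candidate successors sit in dc->jump_pc[]; gen_generic_branch()
   materializes the choice into cpu_npc with a single movcond when the
   value is actually needed. */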
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        -1, /* never */
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        -1, /* always */
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            cmp->is_bool = false;
            cmp->g2 = false;
            cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
            if (!xcc) {
                cmp->g1 = false;
                cmp->c1 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
                break;
            }
#endif
            cmp->g1 = true;
            cmp->c1 = cpu_cc_dst;
            break;

        case 0: /* never */
        case 8: /* always */
        case 7: /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
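
/* Annotation (editor's note, not in the upstream source): the "a" (annul)
   bit makes the delay slot conditional.  For an annulled conditional
   branch the slot executes only on the taken path (see gen_branch_a);
   for ba,a the slot is always skipped, hence pc = target directly, and
   for bn,a it is likewise skipped with pc = npc + 4. */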
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_trunc_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_trunc_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F();
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}

static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D();
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}

static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be a power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2293 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2294 int width, bool cc, bool left)
2296 TCGv lo1, lo2, t1, t2;
2297 uint64_t amask, tabl, tabr;
2298 int shift, imask, omask;
2300 if (cc) {
2301 tcg_gen_mov_tl(cpu_cc_src, s1);
2302 tcg_gen_mov_tl(cpu_cc_src2, s2);
2303 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2304 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2305 dc->cc_op = CC_OP_SUB;
2308 /* Theory of operation: there are two tables, left and right (not to
2309 be confused with the left and right versions of the opcode). These
2310 are indexed by the low 3 bits of the inputs. To make things "easy",
2311 these tables are loaded into two constants, TABL and TABR below.
2312 The operation index = (input & imask) << shift calculates the index
2313 into the constant, while val = (table >> index) & omask calculates
2314 the value we're looking for. */
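/* Worked example for edge8 (width 8, !left): if s1's low bits are 3,
   index = 3 << 3 = 24 and (TABL >> 24) & 0xff = 0x1f, i.e. keep bytes
   3..7 of the block; likewise for s2, (TABR >> 24) & 0xff = 0xf0,
   i.e. keep bytes 0..3.  The mask bit for the lowest-addressed byte
   is the most significant bit. */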
2315 switch (width) {
2316 case 8:
2317 imask = 0x7;
2318 shift = 3;
2319 omask = 0xff;
2320 if (left) {
2321 tabl = 0x80c0e0f0f8fcfeffULL;
2322 tabr = 0xff7f3f1f0f070301ULL;
2323 } else {
2324 tabl = 0x0103070f1f3f7fffULL;
2325 tabr = 0xfffefcf8f0e0c080ULL;
2327 break;
2328 case 16:
2329 imask = 0x6;
2330 shift = 1;
2331 omask = 0xf;
2332 if (left) {
2333 tabl = 0x8cef;
2334 tabr = 0xf731;
2335 } else {
2336 tabl = 0x137f;
2337 tabr = 0xfec8;
2339 break;
2340 case 32:
2341 imask = 0x4;
2342 shift = 0;
2343 omask = 0x3;
2344 if (left) {
2345 tabl = (2 << 2) | 3;
2346 tabr = (3 << 2) | 1;
2347 } else {
2348 tabl = (1 << 2) | 3;
2349 tabr = (3 << 2) | 2;
2351 break;
2352 default:
2353 abort();
2356 lo1 = tcg_temp_new();
2357 lo2 = tcg_temp_new();
2358 tcg_gen_andi_tl(lo1, s1, imask);
2359 tcg_gen_andi_tl(lo2, s2, imask);
2360 tcg_gen_shli_tl(lo1, lo1, shift);
2361 tcg_gen_shli_tl(lo2, lo2, shift);
2363 t1 = tcg_const_tl(tabl);
2364 t2 = tcg_const_tl(tabr);
2365 tcg_gen_shr_tl(lo1, t1, lo1);
2366 tcg_gen_shr_tl(lo2, t2, lo2);
2367 tcg_gen_andi_tl(dst, lo1, omask);
2368 tcg_gen_andi_tl(lo2, lo2, omask);
2370 amask = -8;
2371 if (AM_CHECK(dc)) {
2372 amask &= 0xffffffffULL;
2374 tcg_gen_andi_tl(s1, s1, amask);
2375 tcg_gen_andi_tl(s2, s2, amask);
2377 /* We want to compute
2378 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2379 We've already done dst = lo1, so this reduces to
2380 dst &= (s1 == s2 ? -1 : lo2)
2381 Which we perform by
2382 lo2 |= -(s1 == s2)
2383 dst &= lo2 */
2385 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2386 tcg_gen_neg_tl(t1, t1);
2387 tcg_gen_or_tl(lo2, lo2, t1);
2388 tcg_gen_and_tl(dst, dst, lo2);
2390 tcg_temp_free(lo1);
2391 tcg_temp_free(lo2);
2392 tcg_temp_free(t1);
2393 tcg_temp_free(t2);
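/* alignaddr (VIS I): dst = (s1 + s2) & ~7, and the discarded low three
   bits of the sum are saved in GSR.align for a later faligndata; the
   alignaddrl variant stores the negation of those bits instead. */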
2396 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2398 TCGv tmp = tcg_temp_new();
2400 tcg_gen_add_tl(tmp, s1, s2);
2401 tcg_gen_andi_tl(dst, tmp, -8);
2402 if (left) {
2403 tcg_gen_neg_tl(tmp, tmp);
2405 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2407 tcg_temp_free(tmp);
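/* faligndata (VIS I): treat s1:s2 as one 16-byte value and extract the
   eight bytes starting at byte offset GSR.align, i.e.
   dst = (s1 << (8 * align)) | (s2 >> (64 - 8 * align)). */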
2410 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2412 TCGv t1, t2, shift;
2414 t1 = tcg_temp_new();
2415 t2 = tcg_temp_new();
2416 shift = tcg_temp_new();
2418 tcg_gen_andi_tl(shift, gsr, 7);
2419 tcg_gen_shli_tl(shift, shift, 3);
2420 tcg_gen_shl_tl(t1, s1, shift);
2422 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2423 shift of (up to 63) followed by a constant shift of 1. */
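/* Since shift is a multiple of 8 in [0, 56], shift ^ 63 == 63 - shift,
   so the two shifts together compute s2 >> (64 - shift); for
   shift == 0 the result is 0, as faligndata requires. */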
2424 tcg_gen_xori_tl(shift, shift, 63);
2425 tcg_gen_shr_tl(t2, s2, shift);
2426 tcg_gen_shri_tl(t2, t2, 1);
2428 tcg_gen_or_tl(dst, t1, t2);
2430 tcg_temp_free(t1);
2431 tcg_temp_free(t2);
2432 tcg_temp_free(shift);
2434 #endif
2436 #define CHECK_IU_FEATURE(dc, FEATURE) \
2437 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2438 goto illegal_insn;
2439 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2440 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2441 goto nfpu_insn;
2443 /* before an instruction, dc->pc must be static */
2444 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2446 unsigned int opc, rs1, rs2, rd;
2447 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2448 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2449 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2450 target_long simm;
2452 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2453 tcg_gen_debug_insn_start(dc->pc);
2456 opc = GET_FIELD(insn, 0, 1);
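/* insn<31:30> selects the instruction format: 0 = branches/sethi,
   1 = call, 2 = arithmetic/FPU/misc, 3 = loads and stores (handled
   further below). */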
2458 rd = GET_FIELD(insn, 2, 6);
2460 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2461 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2463 switch (opc) {
2464 case 0: /* branches/sethi */
2466 unsigned int xop = GET_FIELD(insn, 7, 9);
2467 int32_t target;
2468 switch (xop) {
2469 #ifdef TARGET_SPARC64
2470 case 0x1: /* V9 BPcc */
2472 int cc;
2474 target = GET_FIELD_SP(insn, 0, 18);
2475 target = sign_extend(target, 19);
2476 target <<= 2;
2477 cc = GET_FIELD_SP(insn, 20, 21);
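/* cc1:cc0 selects the condition codes: 0 = %icc, 2 = %xcc; the odd
   encodings are reserved. */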
2478 if (cc == 0)
2479 do_branch(dc, target, insn, 0);
2480 else if (cc == 2)
2481 do_branch(dc, target, insn, 1);
2482 else
2483 goto illegal_insn;
2484 goto jmp_insn;
2486 case 0x3: /* V9 BPr */
2488 target = GET_FIELD_SP(insn, 0, 13) |
2489 (GET_FIELD_SP(insn, 20, 21) << 14);
2490 target = sign_extend(target, 16);
2491 target <<= 2;
2492 cpu_src1 = get_src1(insn, cpu_src1);
2493 do_branch_reg(dc, target, insn, cpu_src1);
2494 goto jmp_insn;
2496 case 0x5: /* V9 FBPcc */
2498 int cc = GET_FIELD_SP(insn, 20, 21);
2499 if (gen_trap_ifnofpu(dc)) {
2500 goto jmp_insn;
2502 target = GET_FIELD_SP(insn, 0, 18);
2503 target = sign_extend(target, 19);
2504 target <<= 2;
2505 do_fbranch(dc, target, insn, cc);
2506 goto jmp_insn;
2508 #else
2509 case 0x7: /* CBN+x */
2511 goto ncp_insn;
2513 #endif
2514 case 0x2: /* BN+x */
2516 target = GET_FIELD(insn, 10, 31);
2517 target = sign_extend(target, 22);
2518 target <<= 2;
2519 do_branch(dc, target, insn, 0);
2520 goto jmp_insn;
2522 case 0x6: /* FBN+x */
2524 if (gen_trap_ifnofpu(dc)) {
2525 goto jmp_insn;
2527 target = GET_FIELD(insn, 10, 31);
2528 target = sign_extend(target, 22);
2529 target <<= 2;
2530 do_fbranch(dc, target, insn, 0);
2531 goto jmp_insn;
2533 case 0x4: /* SETHI */
2534 if (rd) { /* sethi to %g0 is the canonical nop */
2535 uint32_t value = GET_FIELD(insn, 10, 31);
2536 TCGv r_const;
2538 r_const = tcg_const_tl(value << 10);
2539 gen_movl_TN_reg(rd, r_const);
2540 tcg_temp_free(r_const);
2542 break;
2543 case 0x0: /* UNIMPL */
2544 default:
2545 goto illegal_insn;
2547 break;
2549 break;
2550 case 1: /*CALL*/
2552 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2553 TCGv r_const;
2555 r_const = tcg_const_tl(dc->pc);
2556 gen_movl_TN_reg(15, r_const);
2557 tcg_temp_free(r_const);
2558 target += dc->pc;
2559 gen_mov_pc_npc(dc);
2560 #ifdef TARGET_SPARC64
2561 if (unlikely(AM_CHECK(dc))) {
2562 target &= 0xffffffffULL;
2564 #endif
2565 dc->npc = target;
2567 goto jmp_insn;
2568 case 2: /* FPU & Logical Operations */
2570 unsigned int xop = GET_FIELD(insn, 7, 12);
2571 if (xop == 0x3a) { /* generate trap */
2572 int cond = GET_FIELD(insn, 3, 6);
2573 TCGv_i32 trap;
2574 int l1 = -1, mask;
2576 if (cond == 0) {
2577 /* Trap never. */
2578 break;
2581 save_state(dc);
2583 if (cond != 8) {
2584 /* Conditional trap. */
2585 DisasCompare cmp;
2586 #ifdef TARGET_SPARC64
2587 /* V9 icc/xcc */
2588 int cc = GET_FIELD_SP(insn, 11, 12);
2589 if (cc == 0) {
2590 gen_compare(&cmp, 0, cond, dc);
2591 } else if (cc == 2) {
2592 gen_compare(&cmp, 1, cond, dc);
2593 } else {
2594 goto illegal_insn;
2596 #else
2597 gen_compare(&cmp, 0, cond, dc);
2598 #endif
2599 l1 = gen_new_label();
2600 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2601 cmp.c1, cmp.c2, l1);
2602 free_compare(&cmp);
2605 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2606 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2608 /* Don't use the normal temporaries, as they may well have
2609 gone out of scope with the branch above. While we're
2610 doing that we might as well pre-truncate to 32-bit. */
2611 trap = tcg_temp_new_i32();
2613 rs1 = GET_FIELD_SP(insn, 14, 18);
2614 if (IS_IMM) {
2615 rs2 = GET_FIELD_SP(insn, 0, 6);
2616 if (rs1 == 0) {
2617 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2618 /* Signal that the trap value is fully constant. */
2619 mask = 0;
2620 } else {
2621 TCGv t1 = tcg_temp_new();
2622 gen_movl_reg_TN(rs1, t1);
2623 tcg_gen_trunc_tl_i32(trap, t1);
2624 tcg_temp_free(t1);
2625 tcg_gen_addi_i32(trap, trap, rs2);
2627 } else {
2628 TCGv t1 = tcg_temp_new();
2629 TCGv t2 = tcg_temp_new();
2630 rs2 = GET_FIELD_SP(insn, 0, 4);
2631 gen_movl_reg_TN(rs1, t1);
2632 gen_movl_reg_TN(rs2, t2);
2633 tcg_gen_add_tl(t1, t1, t2);
2634 tcg_gen_trunc_tl_i32(trap, t1);
2635 tcg_temp_free(t1);
2636 tcg_temp_free(t2);
2638 if (mask != 0) {
2639 tcg_gen_andi_i32(trap, trap, mask);
2640 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2643 gen_helper_raise_exception(cpu_env, trap);
2644 tcg_temp_free_i32(trap);
2646 if (cond == 8) {
2647 /* An unconditional trap ends the TB. */
2648 dc->is_br = 1;
2649 goto jmp_insn;
2650 } else {
2651 /* A conditional trap falls through to the next insn. */
2652 gen_set_label(l1);
2653 break;
2655 } else if (xop == 0x28) {
2656 rs1 = GET_FIELD(insn, 13, 17);
2657 switch(rs1) {
2658 case 0: /* rdy */
2659 #ifndef TARGET_SPARC64
2660 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2661 manual, rdy on the microSPARC
2662 II */
2663 case 0x0f: /* stbar in the SPARCv8 manual,
2664 rdy on the microSPARC II */
2665 case 0x10 ... 0x1f: /* implementation-dependent in the
2666 SPARCv8 manual, rdy on the
2667 microSPARC II */
2668 /* Read Asr17 */
2669 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2670 TCGv r_const;
2672 /* Read Asr17 for a Leon3 uniprocessor */
2673 r_const = tcg_const_tl((1 << 8)
2674 | (dc->def->nwindows - 1));
2675 gen_movl_TN_reg(rd, r_const);
2676 tcg_temp_free(r_const);
2677 break;
2679 #endif
2680 gen_movl_TN_reg(rd, cpu_y);
2681 break;
2682 #ifdef TARGET_SPARC64
2683 case 0x2: /* V9 rdccr */
2684 gen_helper_compute_psr(cpu_env);
2685 gen_helper_rdccr(cpu_dst, cpu_env);
2686 gen_movl_TN_reg(rd, cpu_dst);
2687 break;
2688 case 0x3: /* V9 rdasi */
2689 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2690 gen_movl_TN_reg(rd, cpu_dst);
2691 break;
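/* The tick counter is modeled as a timer object reached through a
   pointer in env; the helper returns its current count. */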
2692 case 0x4: /* V9 rdtick */
2694 TCGv_ptr r_tickptr;
2696 r_tickptr = tcg_temp_new_ptr();
2697 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2698 offsetof(CPUSPARCState, tick));
2699 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2700 tcg_temp_free_ptr(r_tickptr);
2701 gen_movl_TN_reg(rd, cpu_dst);
2703 break;
2704 case 0x5: /* V9 rdpc */
2706 TCGv r_const;
2708 if (unlikely(AM_CHECK(dc))) {
2709 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2710 } else {
2711 r_const = tcg_const_tl(dc->pc);
2713 gen_movl_TN_reg(rd, r_const);
2714 tcg_temp_free(r_const);
2716 break;
2717 case 0x6: /* V9 rdfprs */
2718 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2719 gen_movl_TN_reg(rd, cpu_dst);
2720 break;
2721 case 0xf: /* V9 membar */
2722 break; /* no effect */
2723 case 0x13: /* Graphics Status */
2724 if (gen_trap_ifnofpu(dc)) {
2725 goto jmp_insn;
2727 gen_movl_TN_reg(rd, cpu_gsr);
2728 break;
2729 case 0x16: /* Softint */
2730 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2731 gen_movl_TN_reg(rd, cpu_dst);
2732 break;
2733 case 0x17: /* Tick compare */
2734 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2735 break;
2736 case 0x18: /* System tick */
2738 TCGv_ptr r_tickptr;
2740 r_tickptr = tcg_temp_new_ptr();
2741 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2742 offsetof(CPUSPARCState, stick));
2743 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2744 tcg_temp_free_ptr(r_tickptr);
2745 gen_movl_TN_reg(rd, cpu_dst);
2747 break;
2748 case 0x19: /* System tick compare */
2749 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2750 break;
2751 case 0x10: /* Performance Control */
2752 case 0x11: /* Performance Instrumentation Counter */
2753 case 0x12: /* Dispatch Control */
2754 case 0x14: /* Softint set, WO */
2755 case 0x15: /* Softint clear, WO */
2756 #endif
2757 default:
2758 goto illegal_insn;
2760 #if !defined(CONFIG_USER_ONLY)
2761 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2762 #ifndef TARGET_SPARC64
2763 if (!supervisor(dc))
2764 goto priv_insn;
2765 gen_helper_compute_psr(cpu_env);
2766 dc->cc_op = CC_OP_FLAGS;
2767 gen_helper_rdpsr(cpu_dst, cpu_env);
2768 #else
2769 CHECK_IU_FEATURE(dc, HYPV);
2770 if (!hypervisor(dc))
2771 goto priv_insn;
2772 rs1 = GET_FIELD(insn, 13, 17);
2773 switch (rs1) {
2774 case 0: // hpstate
2775 // gen_op_rdhpstate();
2776 break;
2777 case 1: // htstate
2778 // gen_op_rdhtstate();
2779 break;
2780 case 3: // hintp
2781 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2782 break;
2783 case 5: // htba
2784 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2785 break;
2786 case 6: // hver
2787 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2788 break;
2789 case 31: // hstick_cmpr
2790 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2791 break;
2792 default:
2793 goto illegal_insn;
2795 #endif
2796 gen_movl_TN_reg(rd, cpu_dst);
2797 break;
2798 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2799 if (!supervisor(dc))
2800 goto priv_insn;
2801 #ifdef TARGET_SPARC64
2802 rs1 = GET_FIELD(insn, 13, 17);
2803 switch (rs1) {
2804 case 0: // tpc
2806 TCGv_ptr r_tsptr;
2808 r_tsptr = tcg_temp_new_ptr();
2809 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2810 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2811 offsetof(trap_state, tpc));
2812 tcg_temp_free_ptr(r_tsptr);
2814 break;
2815 case 1: // tnpc
2817 TCGv_ptr r_tsptr;
2819 r_tsptr = tcg_temp_new_ptr();
2820 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2821 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2822 offsetof(trap_state, tnpc));
2823 tcg_temp_free_ptr(r_tsptr);
2825 break;
2826 case 2: // tstate
2828 TCGv_ptr r_tsptr;
2830 r_tsptr = tcg_temp_new_ptr();
2831 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2832 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2833 offsetof(trap_state, tstate));
2834 tcg_temp_free_ptr(r_tsptr);
2836 break;
2837 case 3: // tt
2839 TCGv_ptr r_tsptr;
2841 r_tsptr = tcg_temp_new_ptr();
2842 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2843 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2844 offsetof(trap_state, tt));
2845 tcg_temp_free_ptr(r_tsptr);
2846 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2848 break;
2849 case 4: // tick
2851 TCGv_ptr r_tickptr;
2853 r_tickptr = tcg_temp_new_ptr();
2854 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2855 offsetof(CPUSPARCState, tick));
2856 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2857 gen_movl_TN_reg(rd, cpu_tmp0);
2858 tcg_temp_free_ptr(r_tickptr);
2860 break;
2861 case 5: // tba
2862 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2863 break;
2864 case 6: // pstate
2865 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2866 offsetof(CPUSPARCState, pstate));
2867 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2868 break;
2869 case 7: // tl
2870 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2871 offsetof(CPUSPARCState, tl));
2872 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2873 break;
2874 case 8: // pil
2875 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2876 offsetof(CPUSPARCState, psrpil));
2877 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2878 break;
2879 case 9: // cwp
2880 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2881 break;
2882 case 10: // cansave
2883 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2884 offsetof(CPUSPARCState, cansave));
2885 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2886 break;
2887 case 11: // canrestore
2888 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2889 offsetof(CPUSPARCState, canrestore));
2890 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2891 break;
2892 case 12: // cleanwin
2893 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2894 offsetof(CPUSPARCState, cleanwin));
2895 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2896 break;
2897 case 13: // otherwin
2898 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2899 offsetof(CPUSPARCState, otherwin));
2900 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2901 break;
2902 case 14: // wstate
2903 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2904 offsetof(CPUSPARCState, wstate));
2905 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2906 break;
2907 case 16: // UA2005 gl
2908 CHECK_IU_FEATURE(dc, GL);
2909 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2910 offsetof(CPUSPARCState, gl));
2911 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2912 break;
2913 case 26: // UA2005 strand status
2914 CHECK_IU_FEATURE(dc, HYPV);
2915 if (!hypervisor(dc))
2916 goto priv_insn;
2917 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2918 break;
2919 case 31: // ver
2920 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2921 break;
2922 case 15: // fq
2923 default:
2924 goto illegal_insn;
2926 #else
2927 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2928 #endif
2929 gen_movl_TN_reg(rd, cpu_tmp0);
2930 break;
2931 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2932 #ifdef TARGET_SPARC64
2933 save_state(dc);
2934 gen_helper_flushw(cpu_env);
2935 #else
2936 if (!supervisor(dc))
2937 goto priv_insn;
2938 gen_movl_TN_reg(rd, cpu_tbr);
2939 #endif
2940 break;
2941 #endif
2942 } else if (xop == 0x34) { /* FPop1: FP arithmetic, moves and conversions */
2943 if (gen_trap_ifnofpu(dc)) {
2944 goto jmp_insn;
2946 gen_op_clear_ieee_excp_and_FTT();
2947 rs1 = GET_FIELD(insn, 13, 17);
2948 rs2 = GET_FIELD(insn, 27, 31);
2949 xop = GET_FIELD(insn, 18, 26);
2950 save_state(dc);
2951 switch (xop) {
2952 case 0x1: /* fmovs */
2953 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2954 gen_store_fpr_F(dc, rd, cpu_src1_32);
2955 break;
2956 case 0x5: /* fnegs */
2957 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2958 break;
2959 case 0x9: /* fabss */
2960 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2961 break;
2962 case 0x29: /* fsqrts */
2963 CHECK_FPU_FEATURE(dc, FSQRT);
2964 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2965 break;
2966 case 0x2a: /* fsqrtd */
2967 CHECK_FPU_FEATURE(dc, FSQRT);
2968 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2969 break;
2970 case 0x2b: /* fsqrtq */
2971 CHECK_FPU_FEATURE(dc, FLOAT128);
2972 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2973 break;
2974 case 0x41: /* fadds */
2975 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2976 break;
2977 case 0x42: /* faddd */
2978 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2979 break;
2980 case 0x43: /* faddq */
2981 CHECK_FPU_FEATURE(dc, FLOAT128);
2982 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2983 break;
2984 case 0x45: /* fsubs */
2985 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2986 break;
2987 case 0x46: /* fsubd */
2988 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2989 break;
2990 case 0x47: /* fsubq */
2991 CHECK_FPU_FEATURE(dc, FLOAT128);
2992 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2993 break;
2994 case 0x49: /* fmuls */
2995 CHECK_FPU_FEATURE(dc, FMUL);
2996 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2997 break;
2998 case 0x4a: /* fmuld */
2999 CHECK_FPU_FEATURE(dc, FMUL);
3000 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3001 break;
3002 case 0x4b: /* fmulq */
3003 CHECK_FPU_FEATURE(dc, FLOAT128);
3004 CHECK_FPU_FEATURE(dc, FMUL);
3005 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3006 break;
3007 case 0x4d: /* fdivs */
3008 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3009 break;
3010 case 0x4e: /* fdivd */
3011 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3012 break;
3013 case 0x4f: /* fdivq */
3014 CHECK_FPU_FEATURE(dc, FLOAT128);
3015 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3016 break;
3017 case 0x69: /* fsmuld */
3018 CHECK_FPU_FEATURE(dc, FSMULD);
3019 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3020 break;
3021 case 0x6e: /* fdmulq */
3022 CHECK_FPU_FEATURE(dc, FLOAT128);
3023 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3024 break;
3025 case 0xc4: /* fitos */
3026 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3027 break;
3028 case 0xc6: /* fdtos */
3029 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3030 break;
3031 case 0xc7: /* fqtos */
3032 CHECK_FPU_FEATURE(dc, FLOAT128);
3033 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3034 break;
3035 case 0xc8: /* fitod */
3036 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3037 break;
3038 case 0xc9: /* fstod */
3039 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3040 break;
3041 case 0xcb: /* fqtod */
3042 CHECK_FPU_FEATURE(dc, FLOAT128);
3043 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3044 break;
3045 case 0xcc: /* fitoq */
3046 CHECK_FPU_FEATURE(dc, FLOAT128);
3047 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3048 break;
3049 case 0xcd: /* fstoq */
3050 CHECK_FPU_FEATURE(dc, FLOAT128);
3051 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3052 break;
3053 case 0xce: /* fdtoq */
3054 CHECK_FPU_FEATURE(dc, FLOAT128);
3055 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3056 break;
3057 case 0xd1: /* fstoi */
3058 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3059 break;
3060 case 0xd2: /* fdtoi */
3061 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3062 break;
3063 case 0xd3: /* fqtoi */
3064 CHECK_FPU_FEATURE(dc, FLOAT128);
3065 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3066 break;
3067 #ifdef TARGET_SPARC64
3068 case 0x2: /* V9 fmovd */
3069 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3070 gen_store_fpr_D(dc, rd, cpu_src1_64);
3071 break;
3072 case 0x3: /* V9 fmovq */
3073 CHECK_FPU_FEATURE(dc, FLOAT128);
3074 gen_move_Q(rd, rs2);
3075 break;
3076 case 0x6: /* V9 fnegd */
3077 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3078 break;
3079 case 0x7: /* V9 fnegq */
3080 CHECK_FPU_FEATURE(dc, FLOAT128);
3081 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3082 break;
3083 case 0xa: /* V9 fabsd */
3084 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3085 break;
3086 case 0xb: /* V9 fabsq */
3087 CHECK_FPU_FEATURE(dc, FLOAT128);
3088 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3089 break;
3090 case 0x81: /* V9 fstox */
3091 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3092 break;
3093 case 0x82: /* V9 fdtox */
3094 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3095 break;
3096 case 0x83: /* V9 fqtox */
3097 CHECK_FPU_FEATURE(dc, FLOAT128);
3098 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3099 break;
3100 case 0x84: /* V9 fxtos */
3101 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3102 break;
3103 case 0x88: /* V9 fxtod */
3104 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3105 break;
3106 case 0x8c: /* V9 fxtoq */
3107 CHECK_FPU_FEATURE(dc, FLOAT128);
3108 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3109 break;
3110 #endif
3111 default:
3112 goto illegal_insn;
3114 } else if (xop == 0x35) { /* FPop2: FP compares and conditional moves */
3115 #ifdef TARGET_SPARC64
3116 int cond;
3117 #endif
3118 if (gen_trap_ifnofpu(dc)) {
3119 goto jmp_insn;
3121 gen_op_clear_ieee_excp_and_FTT();
3122 rs1 = GET_FIELD(insn, 13, 17);
3123 rs2 = GET_FIELD(insn, 27, 31);
3124 xop = GET_FIELD(insn, 18, 26);
3125 save_state(dc);
3127 #ifdef TARGET_SPARC64
3128 #define FMOVR(sz) \
3129 do { \
3130 DisasCompare cmp; \
3131 cond = GET_FIELD_SP(insn, 10, 12); /* rcond */ \
3132 cpu_src1 = get_src1(insn, cpu_src1); \
3133 gen_compare_reg(&cmp, cond, cpu_src1); \
3134 gen_fmov##sz(dc, &cmp, rd, rs2); \
3135 free_compare(&cmp); \
3136 } while (0)
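/* FMOVR moves the FP register only if integer register rs1 satisfies
   rcond (insn<12:10>); gen_compare_reg builds the comparison of rs1
   against zero. */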
3138 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3139 FMOVR(s);
3140 break;
3141 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3142 FMOVR(d);
3143 break;
3144 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3145 CHECK_FPU_FEATURE(dc, FLOAT128);
3146 FMOVR(q);
3147 break;
3149 #undef FMOVR
3150 #endif
3151 switch (xop) {
3152 #ifdef TARGET_SPARC64
3153 #define FMOVCC(fcc, sz) \
3154 do { \
3155 DisasCompare cmp; \
3156 cond = GET_FIELD_SP(insn, 14, 17); \
3157 gen_fcompare(&cmp, fcc, cond); \
3158 gen_fmov##sz(dc, &cmp, rd, rs2); \
3159 free_compare(&cmp); \
3160 } while (0)
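/* This first FMOVCC flavor tests a floating-point condition code
   (%fcc0..%fcc3) via gen_fcompare; the redefinition below tests the
   integer %icc/%xcc via gen_compare. */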
3162 case 0x001: /* V9 fmovscc %fcc0 */
3163 FMOVCC(0, s);
3164 break;
3165 case 0x002: /* V9 fmovdcc %fcc0 */
3166 FMOVCC(0, d);
3167 break;
3168 case 0x003: /* V9 fmovqcc %fcc0 */
3169 CHECK_FPU_FEATURE(dc, FLOAT128);
3170 FMOVCC(0, q);
3171 break;
3172 case 0x041: /* V9 fmovscc %fcc1 */
3173 FMOVCC(1, s);
3174 break;
3175 case 0x042: /* V9 fmovdcc %fcc1 */
3176 FMOVCC(1, d);
3177 break;
3178 case 0x043: /* V9 fmovqcc %fcc1 */
3179 CHECK_FPU_FEATURE(dc, FLOAT128);
3180 FMOVCC(1, q);
3181 break;
3182 case 0x081: /* V9 fmovscc %fcc2 */
3183 FMOVCC(2, s);
3184 break;
3185 case 0x082: /* V9 fmovdcc %fcc2 */
3186 FMOVCC(2, d);
3187 break;
3188 case 0x083: /* V9 fmovqcc %fcc2 */
3189 CHECK_FPU_FEATURE(dc, FLOAT128);
3190 FMOVCC(2, q);
3191 break;
3192 case 0x0c1: /* V9 fmovscc %fcc3 */
3193 FMOVCC(3, s);
3194 break;
3195 case 0x0c2: /* V9 fmovdcc %fcc3 */
3196 FMOVCC(3, d);
3197 break;
3198 case 0x0c3: /* V9 fmovqcc %fcc3 */
3199 CHECK_FPU_FEATURE(dc, FLOAT128);
3200 FMOVCC(3, q);
3201 break;
3202 #undef FMOVCC
3203 #define FMOVCC(xcc, sz) \
3204 do { \
3205 DisasCompare cmp; \
3206 cond = GET_FIELD_SP(insn, 14, 17); \
3207 gen_compare(&cmp, xcc, cond, dc); \
3208 gen_fmov##sz(dc, &cmp, rd, rs2); \
3209 free_compare(&cmp); \
3210 } while (0)
3212 case 0x101: /* V9 fmovscc %icc */
3213 FMOVCC(0, s);
3214 break;
3215 case 0x102: /* V9 fmovdcc %icc */
3216 FMOVCC(0, d);
3217 break;
3218 case 0x103: /* V9 fmovqcc %icc */
3219 CHECK_FPU_FEATURE(dc, FLOAT128);
3220 FMOVCC(0, q);
3221 break;
3222 case 0x181: /* V9 fmovscc %xcc */
3223 FMOVCC(1, s);
3224 break;
3225 case 0x182: /* V9 fmovdcc %xcc */
3226 FMOVCC(1, d);
3227 break;
3228 case 0x183: /* V9 fmovqcc %xcc */
3229 CHECK_FPU_FEATURE(dc, FLOAT128);
3230 FMOVCC(1, q);
3231 break;
3232 #undef FMOVCC
3233 #endif
3234 case 0x51: /* fcmps, V9 %fcc */
3235 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3236 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3237 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3238 break;
3239 case 0x52: /* fcmpd, V9 %fcc */
3240 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3241 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3242 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3243 break;
3244 case 0x53: /* fcmpq, V9 %fcc */
3245 CHECK_FPU_FEATURE(dc, FLOAT128);
3246 gen_op_load_fpr_QT0(QFPREG(rs1));
3247 gen_op_load_fpr_QT1(QFPREG(rs2));
3248 gen_op_fcmpq(rd & 3);
3249 break;
3250 case 0x55: /* fcmpes, V9 %fcc */
3251 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3252 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3253 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3254 break;
3255 case 0x56: /* fcmped, V9 %fcc */
3256 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3257 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3258 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3259 break;
3260 case 0x57: /* fcmpeq, V9 %fcc */
3261 CHECK_FPU_FEATURE(dc, FLOAT128);
3262 gen_op_load_fpr_QT0(QFPREG(rs1));
3263 gen_op_load_fpr_QT1(QFPREG(rs2));
3264 gen_op_fcmpeq(rd & 3);
3265 break;
3266 default:
3267 goto illegal_insn;
3269 } else if (xop == 0x2) {
3270 // clr/mov shortcut
3272 rs1 = GET_FIELD(insn, 13, 17);
3273 if (rs1 == 0) {
3274 /* or %g0, x, y: just move x into y */
3275 if (IS_IMM) { /* immediate */
3276 TCGv r_const;
3278 simm = GET_FIELDs(insn, 19, 31);
3279 r_const = tcg_const_tl(simm);
3280 gen_movl_TN_reg(rd, r_const);
3281 tcg_temp_free(r_const);
3282 } else { /* register */
3283 rs2 = GET_FIELD(insn, 27, 31);
3284 gen_movl_reg_TN(rs2, cpu_dst);
3285 gen_movl_TN_reg(rd, cpu_dst);
3287 } else {
3288 cpu_src1 = get_src1(insn, cpu_src1);
3289 if (IS_IMM) { /* immediate */
3290 simm = GET_FIELDs(insn, 19, 31);
3291 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3292 gen_movl_TN_reg(rd, cpu_dst);
3293 } else { /* register */
3294 /* or x, %g0, y: just move x into y */
3295 rs2 = GET_FIELD(insn, 27, 31);
3296 if (rs2 != 0) {
3297 gen_movl_reg_TN(rs2, cpu_src2);
3298 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3299 gen_movl_TN_reg(rd, cpu_dst);
3300 } else
3301 gen_movl_TN_reg(rd, cpu_src1);
3304 #ifdef TARGET_SPARC64
3305 } else if (xop == 0x25) { /* sll, V9 sllx */
3306 cpu_src1 = get_src1(insn, cpu_src1);
3307 if (IS_IMM) { /* immediate */
3308 simm = GET_FIELDs(insn, 20, 31);
3309 if (insn & (1 << 12)) {
3310 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3311 } else {
3312 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3314 } else { /* register */
3315 rs2 = GET_FIELD(insn, 27, 31);
3316 gen_movl_reg_TN(rs2, cpu_src2);
3317 if (insn & (1 << 12)) {
3318 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3319 } else {
3320 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3322 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3324 gen_movl_TN_reg(rd, cpu_dst);
3325 } else if (xop == 0x26) { /* srl, V9 srlx */
3326 cpu_src1 = get_src1(insn, cpu_src1);
3327 if (IS_IMM) { /* immediate */
3328 simm = GET_FIELDs(insn, 20, 31);
3329 if (insn & (1 << 12)) {
3330 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3331 } else {
3332 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3333 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3335 } else { /* register */
3336 rs2 = GET_FIELD(insn, 27, 31);
3337 gen_movl_reg_TN(rs2, cpu_src2);
3338 if (insn & (1 << 12)) {
3339 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3340 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3341 } else {
3342 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3343 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3344 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3347 gen_movl_TN_reg(rd, cpu_dst);
3348 } else if (xop == 0x27) { /* sra, V9 srax */
3349 cpu_src1 = get_src1(insn, cpu_src1);
3350 if (IS_IMM) { /* immediate */
3351 simm = GET_FIELDs(insn, 20, 31);
3352 if (insn & (1 << 12)) {
3353 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3354 } else {
3355 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3356 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3357 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3359 } else { /* register */
3360 rs2 = GET_FIELD(insn, 27, 31);
3361 gen_movl_reg_TN(rs2, cpu_src2);
3362 if (insn & (1 << 12)) {
3363 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3364 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3365 } else {
3366 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3367 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3368 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3369 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3372 gen_movl_TN_reg(rd, cpu_dst);
3373 #endif
3374 } else if (xop < 0x36) {
3375 if (xop < 0x20) {
3376 cpu_src1 = get_src1(insn, cpu_src1);
3377 cpu_src2 = get_src2(insn, cpu_src2);
3378 switch (xop & ~0x10) {
3379 case 0x0: /* add */
3380 if (IS_IMM) {
3381 simm = GET_FIELDs(insn, 19, 31);
3382 if (xop & 0x10) {
3383 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3384 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3385 dc->cc_op = CC_OP_ADD;
3386 } else {
3387 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3389 } else {
3390 if (xop & 0x10) {
3391 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3392 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3393 dc->cc_op = CC_OP_ADD;
3394 } else {
3395 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3398 break;
3399 case 0x1: /* and */
3400 if (IS_IMM) {
3401 simm = GET_FIELDs(insn, 19, 31);
3402 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3403 } else {
3404 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3406 if (xop & 0x10) {
3407 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3408 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3409 dc->cc_op = CC_OP_LOGIC;
3411 break;
3412 case 0x2: /* or */
3413 if (IS_IMM) {
3414 simm = GET_FIELDs(insn, 19, 31);
3415 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3416 } else {
3417 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3419 if (xop & 0x10) {
3420 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3421 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3422 dc->cc_op = CC_OP_LOGIC;
3424 break;
3425 case 0x3: /* xor */
3426 if (IS_IMM) {
3427 simm = GET_FIELDs(insn, 19, 31);
3428 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3429 } else {
3430 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3432 if (xop & 0x10) {
3433 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3434 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3435 dc->cc_op = CC_OP_LOGIC;
3437 break;
3438 case 0x4: /* sub */
3439 if (IS_IMM) {
3440 simm = GET_FIELDs(insn, 19, 31);
3441 if (xop & 0x10) {
3442 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3443 } else {
3444 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3446 } else {
3447 if (xop & 0x10) {
3448 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3449 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3450 dc->cc_op = CC_OP_SUB;
3451 } else {
3452 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3455 break;
3456 case 0x5: /* andn */
3457 if (IS_IMM) {
3458 simm = GET_FIELDs(insn, 19, 31);
3459 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3460 } else {
3461 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3463 if (xop & 0x10) {
3464 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3465 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3466 dc->cc_op = CC_OP_LOGIC;
3468 break;
3469 case 0x6: /* orn */
3470 if (IS_IMM) {
3471 simm = GET_FIELDs(insn, 19, 31);
3472 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3473 } else {
3474 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3476 if (xop & 0x10) {
3477 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3478 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3479 dc->cc_op = CC_OP_LOGIC;
3481 break;
3482 case 0x7: /* xorn */
3483 if (IS_IMM) {
3484 simm = GET_FIELDs(insn, 19, 31);
3485 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3486 } else {
3487 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3488 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3490 if (xop & 0x10) {
3491 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3492 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3493 dc->cc_op = CC_OP_LOGIC;
3495 break;
3496 case 0x8: /* addx, V9 addc */
3497 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3498 (xop & 0x10));
3499 break;
3500 #ifdef TARGET_SPARC64
3501 case 0x9: /* V9 mulx */
3502 if (IS_IMM) {
3503 simm = GET_FIELDs(insn, 19, 31);
3504 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3505 } else {
3506 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3508 break;
3509 #endif
3510 case 0xa: /* umul */
3511 CHECK_IU_FEATURE(dc, MUL);
3512 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3513 if (xop & 0x10) {
3514 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3515 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3516 dc->cc_op = CC_OP_LOGIC;
3518 break;
3519 case 0xb: /* smul */
3520 CHECK_IU_FEATURE(dc, MUL);
3521 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3522 if (xop & 0x10) {
3523 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3524 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3525 dc->cc_op = CC_OP_LOGIC;
3527 break;
3528 case 0xc: /* subx, V9 subc */
3529 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3530 (xop & 0x10));
3531 break;
3532 #ifdef TARGET_SPARC64
3533 case 0xd: /* V9 udivx */
3534 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3535 break;
3536 #endif
3537 case 0xe: /* udiv */
3538 CHECK_IU_FEATURE(dc, DIV);
3539 if (xop & 0x10) {
3540 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3541 cpu_src2);
3542 dc->cc_op = CC_OP_DIV;
3543 } else {
3544 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3545 cpu_src2);
3547 break;
3548 case 0xf: /* sdiv */
3549 CHECK_IU_FEATURE(dc, DIV);
3550 if (xop & 0x10) {
3551 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3552 cpu_src2);
3553 dc->cc_op = CC_OP_DIV;
3554 } else {
3555 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3556 cpu_src2);
3558 break;
3559 default:
3560 goto illegal_insn;
3562 gen_movl_TN_reg(rd, cpu_dst);
3563 } else {
3564 cpu_src1 = get_src1(insn, cpu_src1);
3565 cpu_src2 = get_src2(insn, cpu_src2);
3566 switch (xop) {
3567 case 0x20: /* taddcc */
3568 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3569 gen_movl_TN_reg(rd, cpu_dst);
3570 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3571 dc->cc_op = CC_OP_TADD;
3572 break;
3573 case 0x21: /* tsubcc */
3574 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3575 gen_movl_TN_reg(rd, cpu_dst);
3576 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3577 dc->cc_op = CC_OP_TSUB;
3578 break;
3579 case 0x22: /* taddcctv */
3580 gen_helper_taddcctv(cpu_dst, cpu_env,
3581 cpu_src1, cpu_src2);
3582 gen_movl_TN_reg(rd, cpu_dst);
3583 dc->cc_op = CC_OP_TADDTV;
3584 break;
3585 case 0x23: /* tsubcctv */
3586 gen_helper_tsubcctv(cpu_dst, cpu_env,
3587 cpu_src1, cpu_src2);
3588 gen_movl_TN_reg(rd, cpu_dst);
3589 dc->cc_op = CC_OP_TSUBTV;
3590 break;
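/* mulscc performs one step of the V8 iterative multiply and consumes
   the current N and V flags, hence the eager compute_psr. */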
3591 case 0x24: /* mulscc */
3592 gen_helper_compute_psr(cpu_env);
3593 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3594 gen_movl_TN_reg(rd, cpu_dst);
3595 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3596 dc->cc_op = CC_OP_ADD;
3597 break;
3598 #ifndef TARGET_SPARC64
3599 case 0x25: /* sll */
3600 if (IS_IMM) { /* immediate */
3601 simm = GET_FIELDs(insn, 20, 31);
3602 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3603 } else { /* register */
3604 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3605 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3607 gen_movl_TN_reg(rd, cpu_dst);
3608 break;
3609 case 0x26: /* srl */
3610 if (IS_IMM) { /* immediate */
3611 simm = GET_FIELDs(insn, 20, 31);
3612 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3613 } else { /* register */
3614 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3615 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3617 gen_movl_TN_reg(rd, cpu_dst);
3618 break;
3619 case 0x27: /* sra */
3620 if (IS_IMM) { /* immediate */
3621 simm = GET_FIELDs(insn, 20, 31);
3622 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3623 } else { /* register */
3624 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3625 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3627 gen_movl_TN_reg(rd, cpu_dst);
3628 break;
3629 #endif
3630 case 0x30:
3632 switch(rd) {
3633 case 0: /* wry */
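/* The WR* instructions architecturally write rs1 XOR rs2 (or rs1 XOR
   simm13), so the xor below is mandated by the spec, not a shortcut. */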
3634 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3635 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3636 break;
3637 #ifndef TARGET_SPARC64
3638 case 0x01 ... 0x0f: /* undefined in the
3639 SPARCv8 manual, nop
3640 on the microSPARC
3641 II */
3642 case 0x10 ... 0x1f: /* implementation-dependent
3643 in the SPARCv8
3644 manual, nop on the
3645 microSPARC II */
3646 break;
3647 #else
3648 case 0x2: /* V9 wrccr */
3649 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3650 gen_helper_wrccr(cpu_env, cpu_dst);
3651 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3652 dc->cc_op = CC_OP_FLAGS;
3653 break;
3654 case 0x3: /* V9 wrasi */
3655 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3656 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3657 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3658 break;
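/* Changing FPRS may enable or disable the FPU, invalidating this
   translation's assumptions, so the TB is ended and restarted at the
   next instruction. */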
3659 case 0x6: /* V9 wrfprs */
3660 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3661 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3662 save_state(dc);
3663 gen_op_next_insn();
3664 tcg_gen_exit_tb(0);
3665 dc->is_br = 1;
3666 break;
3667 case 0xf: /* V9 sir, nop if user */
3668 #if !defined(CONFIG_USER_ONLY)
3669 if (supervisor(dc)) {
3670 ; // XXX
3672 #endif
3673 break;
3674 case 0x13: /* Graphics Status */
3675 if (gen_trap_ifnofpu(dc)) {
3676 goto jmp_insn;
3678 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3679 break;
3680 case 0x14: /* Softint set */
3681 if (!supervisor(dc))
3682 goto illegal_insn;
3683 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3684 gen_helper_set_softint(cpu_env, cpu_tmp64);
3685 break;
3686 case 0x15: /* Softint clear */
3687 if (!supervisor(dc))
3688 goto illegal_insn;
3689 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3690 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3691 break;
3692 case 0x16: /* Softint write */
3693 if (!supervisor(dc))
3694 goto illegal_insn;
3695 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3696 gen_helper_write_softint(cpu_env, cpu_tmp64);
3697 break;
3698 case 0x17: /* Tick compare */
3699 #if !defined(CONFIG_USER_ONLY)
3700 if (!supervisor(dc))
3701 goto illegal_insn;
3702 #endif
3704 TCGv_ptr r_tickptr;
3706 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3707 cpu_src2);
3708 r_tickptr = tcg_temp_new_ptr();
3709 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3710 offsetof(CPUSPARCState, tick));
3711 gen_helper_tick_set_limit(r_tickptr,
3712 cpu_tick_cmpr);
3713 tcg_temp_free_ptr(r_tickptr);
3715 break;
3716 case 0x18: /* System tick */
3717 #if !defined(CONFIG_USER_ONLY)
3718 if (!supervisor(dc))
3719 goto illegal_insn;
3720 #endif
3722 TCGv_ptr r_tickptr;
3724 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3725 cpu_src2);
3726 r_tickptr = tcg_temp_new_ptr();
3727 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3728 offsetof(CPUSPARCState, stick));
3729 gen_helper_tick_set_count(r_tickptr,
3730 cpu_dst);
3731 tcg_temp_free_ptr(r_tickptr);
3733 break;
3734 case 0x19: /* System tick compare */
3735 #if !defined(CONFIG_USER_ONLY)
3736 if (!supervisor(dc))
3737 goto illegal_insn;
3738 #endif
3740 TCGv_ptr r_tickptr;
3742 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3743 cpu_src2);
3744 r_tickptr = tcg_temp_new_ptr();
3745 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3746 offsetof(CPUSPARCState, stick));
3747 gen_helper_tick_set_limit(r_tickptr,
3748 cpu_stick_cmpr);
3749 tcg_temp_free_ptr(r_tickptr);
3751 break;
3753 case 0x10: /* Performance Control */
3754 case 0x11: /* Performance Instrumentation
3755 Counter */
3756 case 0x12: /* Dispatch Control */
3757 #endif
3758 default:
3759 goto illegal_insn;
3762 break;
3763 #if !defined(CONFIG_USER_ONLY)
3764 case 0x31: /* wrpsr, V9 saved, restored */
3766 if (!supervisor(dc))
3767 goto priv_insn;
3768 #ifdef TARGET_SPARC64
3769 switch (rd) {
3770 case 0:
3771 gen_helper_saved(cpu_env);
3772 break;
3773 case 1:
3774 gen_helper_restored(cpu_env);
3775 break;
3776 case 2: /* UA2005 allclean */
3777 case 3: /* UA2005 otherw */
3778 case 4: /* UA2005 normalw */
3779 case 5: /* UA2005 invalw */
3780 // XXX
3781 default:
3782 goto illegal_insn;
3784 #else
3785 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3786 gen_helper_wrpsr(cpu_env, cpu_dst);
3787 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3788 dc->cc_op = CC_OP_FLAGS;
3789 save_state(dc);
3790 gen_op_next_insn();
3791 tcg_gen_exit_tb(0);
3792 dc->is_br = 1;
3793 #endif
3795 break;
3796 case 0x32: /* wrwim, V9 wrpr */
3798 if (!supervisor(dc))
3799 goto priv_insn;
3800 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3801 #ifdef TARGET_SPARC64
3802 switch (rd) {
3803 case 0: // tpc
3805 TCGv_ptr r_tsptr;
3807 r_tsptr = tcg_temp_new_ptr();
3808 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3809 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3810 offsetof(trap_state, tpc));
3811 tcg_temp_free_ptr(r_tsptr);
3813 break;
3814 case 1: // tnpc
3816 TCGv_ptr r_tsptr;
3818 r_tsptr = tcg_temp_new_ptr();
3819 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3820 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3821 offsetof(trap_state, tnpc));
3822 tcg_temp_free_ptr(r_tsptr);
3824 break;
3825 case 2: // tstate
3827 TCGv_ptr r_tsptr;
3829 r_tsptr = tcg_temp_new_ptr();
3830 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3831 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3832 offsetof(trap_state,
3833 tstate));
3834 tcg_temp_free_ptr(r_tsptr);
3836 break;
3837 case 3: // tt
3839 TCGv_ptr r_tsptr;
3841 r_tsptr = tcg_temp_new_ptr();
3842 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3843 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3844 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3845 offsetof(trap_state, tt));
3846 tcg_temp_free_ptr(r_tsptr);
3848 break;
3849 case 4: // tick
3851 TCGv_ptr r_tickptr;
3853 r_tickptr = tcg_temp_new_ptr();
3854 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3855 offsetof(CPUSPARCState, tick));
3856 gen_helper_tick_set_count(r_tickptr,
3857 cpu_tmp0);
3858 tcg_temp_free_ptr(r_tickptr);
3860 break;
3861 case 5: // tba
3862 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3863 break;
3864 case 6: // pstate
3866 TCGv r_tmp = tcg_temp_local_new();
3868 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3869 save_state(dc);
3870 gen_helper_wrpstate(cpu_env, r_tmp);
3871 tcg_temp_free(r_tmp);
3872 dc->npc = DYNAMIC_PC;
3874 break;
3875 case 7: // tl
3877 TCGv r_tmp = tcg_temp_local_new();
3879 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3880 save_state(dc);
3881 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3882 tcg_temp_free(r_tmp);
3883 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3884 offsetof(CPUSPARCState, tl));
3885 dc->npc = DYNAMIC_PC;
3887 break;
3888 case 8: // pil
3889 gen_helper_wrpil(cpu_env, cpu_tmp0);
3890 break;
3891 case 9: // cwp
3892 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3893 break;
3894 case 10: // cansave
3895 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3896 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3897 offsetof(CPUSPARCState,
3898 cansave));
3899 break;
3900 case 11: // canrestore
3901 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3902 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3903 offsetof(CPUSPARCState,
3904 canrestore));
3905 break;
3906 case 12: // cleanwin
3907 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3908 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3909 offsetof(CPUSPARCState,
3910 cleanwin));
3911 break;
3912 case 13: // otherwin
3913 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3914 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3915 offsetof(CPUSPARCState,
3916 otherwin));
3917 break;
3918 case 14: // wstate
3919 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3920 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3921 offsetof(CPUSPARCState,
3922 wstate));
3923 break;
3924 case 16: // UA2005 gl
3925 CHECK_IU_FEATURE(dc, GL);
3926 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3927 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3928 offsetof(CPUSPARCState, gl));
3929 break;
3930 case 26: // UA2005 strand status
3931 CHECK_IU_FEATURE(dc, HYPV);
3932 if (!hypervisor(dc))
3933 goto priv_insn;
3934 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3935 break;
3936 default:
3937 goto illegal_insn;
3939 #else
3940 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3941 if (dc->def->nwindows != 32)
3942 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3943 (1 << dc->def->nwindows) - 1);
3944 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3945 #endif
3947 break;
3948 case 0x33: /* wrtbr, UA2005 wrhpr */
3950 #ifndef TARGET_SPARC64
3951 if (!supervisor(dc))
3952 goto priv_insn;
3953 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3954 #else
3955 CHECK_IU_FEATURE(dc, HYPV);
3956 if (!hypervisor(dc))
3957 goto priv_insn;
3958 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3959 switch (rd) {
3960 case 0: // hpstate
3961 // XXX gen_op_wrhpstate();
3962 save_state(dc);
3963 gen_op_next_insn();
3964 tcg_gen_exit_tb(0);
3965 dc->is_br = 1;
3966 break;
3967 case 1: // htstate
3968 // XXX gen_op_wrhtstate();
3969 break;
3970 case 3: // hintp
3971 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3972 break;
3973 case 5: // htba
3974 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3975 break;
3976 case 31: // hstick_cmpr
3978 TCGv_ptr r_tickptr;
3980 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3981 r_tickptr = tcg_temp_new_ptr();
3982 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3983 offsetof(CPUSPARCState, hstick));
3984 gen_helper_tick_set_limit(r_tickptr,
3985 cpu_hstick_cmpr);
3986 tcg_temp_free_ptr(r_tickptr);
3988 break;
3989 case 6: // hver readonly
3990 default:
3991 goto illegal_insn;
3993 #endif
3995 break;
3996 #endif
3997 #ifdef TARGET_SPARC64
3998 case 0x2c: /* V9 movcc */
4000 int cc = GET_FIELD_SP(insn, 11, 12);
4001 int cond = GET_FIELD_SP(insn, 14, 17);
4002 DisasCompare cmp;
4004 if (insn & (1 << 18)) {
4005 if (cc == 0) {
4006 gen_compare(&cmp, 0, cond, dc);
4007 } else if (cc == 2) {
4008 gen_compare(&cmp, 1, cond, dc);
4009 } else {
4010 goto illegal_insn;
4012 } else {
4013 gen_fcompare(&cmp, cc, cond);
4016 /* The get_src2 above loaded the normal 13-bit
4017 immediate field, not the 11-bit field we have
4018 in movcc. But it did handle the reg case. */
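/* rd is loaded first so that it supplies the "false" operand of
   movcond: when the condition fails, rd is rewritten with its old
   value. */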
4019 if (IS_IMM) {
4020 simm = GET_FIELD_SPs(insn, 0, 10);
4021 tcg_gen_movi_tl(cpu_src2, simm);
4024 gen_movl_reg_TN(rd, cpu_dst);
4025 tcg_gen_movcond_tl(cmp.cond, cpu_dst,
4026 cmp.c1, cmp.c2,
4027 cpu_src2, cpu_dst);
4028 free_compare(&cmp);
4029 gen_movl_TN_reg(rd, cpu_dst);
4030 break;
4032 case 0x2d: /* V9 sdivx */
4033 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4034 gen_movl_TN_reg(rd, cpu_dst);
4035 break;
4036 case 0x2e: /* V9 popc */
4038 cpu_src2 = get_src2(insn, cpu_src2);
4039 gen_helper_popc(cpu_dst, cpu_src2);
4040 gen_movl_TN_reg(rd, cpu_dst);
break; /* do not fall through into movr */
4042 case 0x2f: /* V9 movr */
4044 int cond = GET_FIELD_SP(insn, 10, 12);
4045 DisasCompare cmp;
4047 gen_compare_reg(&cmp, cond, cpu_src1);
4049 /* The get_src2 above loaded the normal 13-bit
4050 immediate field, not the 10-bit field we have
4051 in movr. But it did handle the reg case. */
4052 if (IS_IMM) {
4053 simm = GET_FIELD_SPs(insn, 0, 9);
4054 tcg_gen_movi_tl(cpu_src2, simm);
4057 gen_movl_reg_TN(rd, cpu_dst);
4058 tcg_gen_movcond_tl(cmp.cond, cpu_dst,
4059 cmp.c1, cmp.c2,
4060 cpu_src2, cpu_dst);
4061 free_compare(&cmp);
4062 gen_movl_TN_reg(rd, cpu_dst);
4063 break;
4065 #endif
4066 default:
4067 goto illegal_insn;
4070 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4071 #ifdef TARGET_SPARC64
4072 int opf = GET_FIELD_SP(insn, 5, 13);
4073 rs1 = GET_FIELD(insn, 13, 17);
4074 rs2 = GET_FIELD(insn, 27, 31);
4075 if (gen_trap_ifnofpu(dc)) {
4076 goto jmp_insn;
4079 switch (opf) {
4080 case 0x000: /* VIS I edge8cc */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 gen_movl_reg_TN(rs1, cpu_src1);
4083 gen_movl_reg_TN(rs2, cpu_src2);
4084 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4085 gen_movl_TN_reg(rd, cpu_dst);
4086 break;
4087 case 0x001: /* VIS II edge8n */
4088 CHECK_FPU_FEATURE(dc, VIS2);
4089 gen_movl_reg_TN(rs1, cpu_src1);
4090 gen_movl_reg_TN(rs2, cpu_src2);
4091 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4092 gen_movl_TN_reg(rd, cpu_dst);
4093 break;
4094 case 0x002: /* VIS I edge8lcc */
4095 CHECK_FPU_FEATURE(dc, VIS1);
4096 gen_movl_reg_TN(rs1, cpu_src1);
4097 gen_movl_reg_TN(rs2, cpu_src2);
4098 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4099 gen_movl_TN_reg(rd, cpu_dst);
4100 break;
4101 case 0x003: /* VIS II edge8ln */
4102 CHECK_FPU_FEATURE(dc, VIS2);
4103 gen_movl_reg_TN(rs1, cpu_src1);
4104 gen_movl_reg_TN(rs2, cpu_src2);
4105 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4106 gen_movl_TN_reg(rd, cpu_dst);
4107 break;
4108 case 0x004: /* VIS I edge16cc */
4109 CHECK_FPU_FEATURE(dc, VIS1);
4110 gen_movl_reg_TN(rs1, cpu_src1);
4111 gen_movl_reg_TN(rs2, cpu_src2);
4112 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4113 gen_movl_TN_reg(rd, cpu_dst);
4114 break;
4115 case 0x005: /* VIS II edge16n */
4116 CHECK_FPU_FEATURE(dc, VIS2);
4117 gen_movl_reg_TN(rs1, cpu_src1);
4118 gen_movl_reg_TN(rs2, cpu_src2);
4119 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4120 gen_movl_TN_reg(rd, cpu_dst);
4121 break;
4122 case 0x006: /* VIS I edge16lcc */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 gen_movl_reg_TN(rs1, cpu_src1);
4125 gen_movl_reg_TN(rs2, cpu_src2);
4126 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4127 gen_movl_TN_reg(rd, cpu_dst);
4128 break;
4129 case 0x007: /* VIS II edge16ln */
4130 CHECK_FPU_FEATURE(dc, VIS2);
4131 gen_movl_reg_TN(rs1, cpu_src1);
4132 gen_movl_reg_TN(rs2, cpu_src2);
4133 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4134 gen_movl_TN_reg(rd, cpu_dst);
4135 break;
4136 case 0x008: /* VIS I edge32cc */
4137 CHECK_FPU_FEATURE(dc, VIS1);
4138 gen_movl_reg_TN(rs1, cpu_src1);
4139 gen_movl_reg_TN(rs2, cpu_src2);
4140 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4141 gen_movl_TN_reg(rd, cpu_dst);
4142 break;
4143 case 0x009: /* VIS II edge32n */
4144 CHECK_FPU_FEATURE(dc, VIS2);
4145 gen_movl_reg_TN(rs1, cpu_src1);
4146 gen_movl_reg_TN(rs2, cpu_src2);
4147 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4148 gen_movl_TN_reg(rd, cpu_dst);
4149 break;
4150 case 0x00a: /* VIS I edge32lcc */
4151 CHECK_FPU_FEATURE(dc, VIS1);
4152 gen_movl_reg_TN(rs1, cpu_src1);
4153 gen_movl_reg_TN(rs2, cpu_src2);
4154 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4155 gen_movl_TN_reg(rd, cpu_dst);
4156 break;
4157 case 0x00b: /* VIS II edge32ln */
4158 CHECK_FPU_FEATURE(dc, VIS2);
4159 gen_movl_reg_TN(rs1, cpu_src1);
4160 gen_movl_reg_TN(rs2, cpu_src2);
4161 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4162 gen_movl_TN_reg(rd, cpu_dst);
4163 break;
4164 case 0x010: /* VIS I array8 */
4165 CHECK_FPU_FEATURE(dc, VIS1);
4166 cpu_src1 = get_src1(insn, cpu_src1);
4167 gen_movl_reg_TN(rs2, cpu_src2);
4168 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4169 gen_movl_TN_reg(rd, cpu_dst);
4170 break;
4171 case 0x012: /* VIS I array16 */
4172 CHECK_FPU_FEATURE(dc, VIS1);
4173 cpu_src1 = get_src1(insn, cpu_src1);
4174 gen_movl_reg_TN(rs2, cpu_src2);
4175 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4176 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4177 gen_movl_TN_reg(rd, cpu_dst);
4178 break;
4179 case 0x014: /* VIS I array32 */
4180 CHECK_FPU_FEATURE(dc, VIS1);
4181 cpu_src1 = get_src1(insn, cpu_src1);
4182 gen_movl_reg_TN(rs2, cpu_src2);
4183 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4184 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4185 gen_movl_TN_reg(rd, cpu_dst);
4186 break;
4187 case 0x018: /* VIS I alignaddr */
4188 CHECK_FPU_FEATURE(dc, VIS1);
4189 cpu_src1 = get_src1(insn, cpu_src1);
4190 gen_movl_reg_TN(rs2, cpu_src2);
4191 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4192 gen_movl_TN_reg(rd, cpu_dst);
4193 break;
4194 case 0x01a: /* VIS I alignaddrl */
4195 CHECK_FPU_FEATURE(dc, VIS1);
4196 cpu_src1 = get_src1(insn, cpu_src1);
4197 gen_movl_reg_TN(rs2, cpu_src2);
4198 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4199 gen_movl_TN_reg(rd, cpu_dst);
4200 break;
4201 case 0x019: /* VIS II bmask */
4202 CHECK_FPU_FEATURE(dc, VIS2);
4203 cpu_src1 = get_src1(insn, cpu_src1);
4204 gen_movl_reg_TN(rs2, cpu_src2);
4205 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4206 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
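/* Besides the integer result, bmask deposits the low 32 bits of the
   sum into GSR.mask (bits 63..32), which bshuffle (0x04c below)
   consumes. */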
4207 gen_movl_TN_reg(rd, cpu_dst);
4208 break;
4209 case 0x020: /* VIS I fcmple16 */
4210 CHECK_FPU_FEATURE(dc, VIS1);
4211 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4212 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4213 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4214 gen_movl_TN_reg(rd, cpu_dst);
4215 break;
4216 case 0x022: /* VIS I fcmpne16 */
4217 CHECK_FPU_FEATURE(dc, VIS1);
4218 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4219 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4220 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4221 gen_movl_TN_reg(rd, cpu_dst);
4222 break;
4223 case 0x024: /* VIS I fcmple32 */
4224 CHECK_FPU_FEATURE(dc, VIS1);
4225 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4226 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4227 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4228 gen_movl_TN_reg(rd, cpu_dst);
4229 break;
4230 case 0x026: /* VIS I fcmpne32 */
4231 CHECK_FPU_FEATURE(dc, VIS1);
4232 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4233 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4234 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4235 gen_movl_TN_reg(rd, cpu_dst);
4236 break;
4237 case 0x028: /* VIS I fcmpgt16 */
4238 CHECK_FPU_FEATURE(dc, VIS1);
4239 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4240 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4241 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4242 gen_movl_TN_reg(rd, cpu_dst);
4243 break;
4244 case 0x02a: /* VIS I fcmpeq16 */
4245 CHECK_FPU_FEATURE(dc, VIS1);
4246 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4247 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4248 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4249 gen_movl_TN_reg(rd, cpu_dst);
4250 break;
4251 case 0x02c: /* VIS I fcmpgt32 */
4252 CHECK_FPU_FEATURE(dc, VIS1);
4253 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4254 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4255 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4256 gen_movl_TN_reg(rd, cpu_dst);
4257 break;
4258 case 0x02e: /* VIS I fcmpeq32 */
4259 CHECK_FPU_FEATURE(dc, VIS1);
4260 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4261 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4262 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4263 gen_movl_TN_reg(rd, cpu_dst);
4264 break;
4265 case 0x031: /* VIS I fmul8x16 */
4266 CHECK_FPU_FEATURE(dc, VIS1);
4267 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4268 break;
4269 case 0x033: /* VIS I fmul8x16au */
4270 CHECK_FPU_FEATURE(dc, VIS1);
4271 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4272 break;
4273 case 0x035: /* VIS I fmul8x16al */
4274 CHECK_FPU_FEATURE(dc, VIS1);
4275 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4276 break;
4277 case 0x036: /* VIS I fmul8sux16 */
4278 CHECK_FPU_FEATURE(dc, VIS1);
4279 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4280 break;
4281 case 0x037: /* VIS I fmul8ulx16 */
4282 CHECK_FPU_FEATURE(dc, VIS1);
4283 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4284 break;
4285 case 0x038: /* VIS I fmuld8sux16 */
4286 CHECK_FPU_FEATURE(dc, VIS1);
4287 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4288 break;
4289 case 0x039: /* VIS I fmuld8ulx16 */
4290 CHECK_FPU_FEATURE(dc, VIS1);
4291 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4292 break;
4293 case 0x03a: /* VIS I fpack32 */
4294 CHECK_FPU_FEATURE(dc, VIS1);
4295 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4296 break;
4297 case 0x03b: /* VIS I fpack16 */
4298 CHECK_FPU_FEATURE(dc, VIS1);
4299 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4300 cpu_dst_32 = gen_dest_fpr_F();
4301 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4302 gen_store_fpr_F(dc, rd, cpu_dst_32);
4303 break;
4304 case 0x03d: /* VIS I fpackfix */
4305 CHECK_FPU_FEATURE(dc, VIS1);
4306 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4307 cpu_dst_32 = gen_dest_fpr_F();
4308 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4309 gen_store_fpr_F(dc, rd, cpu_dst_32);
4310 break;
4311 case 0x03e: /* VIS I pdist */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4314 break;
4315 case 0x048: /* VIS I faligndata */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4318 break;
4319 case 0x04b: /* VIS I fpmerge */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4322 break;
4323 case 0x04c: /* VIS II bshuffle */
4324 CHECK_FPU_FEATURE(dc, VIS2);
4325 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4326 break;
4327 case 0x04d: /* VIS I fexpand */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4330 break;
4331 case 0x050: /* VIS I fpadd16 */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4334 break;
4335 case 0x051: /* VIS I fpadd16s */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4338 break;
4339 case 0x052: /* VIS I fpadd32 */
4340 CHECK_FPU_FEATURE(dc, VIS1);
4341 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4342 break;
4343 case 0x053: /* VIS I fpadd32s */
4344 CHECK_FPU_FEATURE(dc, VIS1);
4345 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4346 break;
4347 case 0x054: /* VIS I fpsub16 */
4348 CHECK_FPU_FEATURE(dc, VIS1);
4349 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4350 break;
4351 case 0x055: /* VIS I fpsub16s */
4352 CHECK_FPU_FEATURE(dc, VIS1);
4353 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4354 break;
4355 case 0x056: /* VIS I fpsub32 */
4356 CHECK_FPU_FEATURE(dc, VIS1);
4357 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4358 break;
4359 case 0x057: /* VIS I fpsub32s */
4360 CHECK_FPU_FEATURE(dc, VIS1);
4361 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4362 break;
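/* The cases from fzero through fones below (opf 0x060-0x07f) cover the
   full set of VIS boolean operations. All sixteen two-operand logic
   functions map directly onto native TCG ops (mov, not, and, andc,
   nand, or, orc, nor, xor, eqv, constant 0/-1), so no helper calls are
   needed; the "s"-suffixed cases are the 32-bit single variants,
   dispatched through gen_ne_fop_FF/gen_ne_fop_FFF. */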
4363 case 0x060: /* VIS I fzero */
4364 CHECK_FPU_FEATURE(dc, VIS1);
4365 cpu_dst_64 = gen_dest_fpr_D();
4366 tcg_gen_movi_i64(cpu_dst_64, 0);
4367 gen_store_fpr_D(dc, rd, cpu_dst_64);
4368 break;
4369 case 0x061: /* VIS I fzeros */
4370 CHECK_FPU_FEATURE(dc, VIS1);
4371 cpu_dst_32 = gen_dest_fpr_F();
4372 tcg_gen_movi_i32(cpu_dst_32, 0);
4373 gen_store_fpr_F(dc, rd, cpu_dst_32);
4374 break;
4375 case 0x062: /* VIS I fnor */
4376 CHECK_FPU_FEATURE(dc, VIS1);
4377 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4378 break;
4379 case 0x063: /* VIS I fnors */
4380 CHECK_FPU_FEATURE(dc, VIS1);
4381 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4382 break;
4383 case 0x064: /* VIS I fandnot2 */
4384 CHECK_FPU_FEATURE(dc, VIS1);
4385 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4386 break;
4387 case 0x065: /* VIS I fandnot2s */
4388 CHECK_FPU_FEATURE(dc, VIS1);
4389 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4390 break;
4391 case 0x066: /* VIS I fnot2 */
4392 CHECK_FPU_FEATURE(dc, VIS1);
4393 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4394 break;
4395 case 0x067: /* VIS I fnot2s */
4396 CHECK_FPU_FEATURE(dc, VIS1);
4397 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4398 break;
4399 case 0x068: /* VIS I fandnot1 */
4400 CHECK_FPU_FEATURE(dc, VIS1);
4401 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4402 break;
4403 case 0x069: /* VIS I fandnot1s */
4404 CHECK_FPU_FEATURE(dc, VIS1);
4405 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4406 break;
4407 case 0x06a: /* VIS I fnot1 */
4408 CHECK_FPU_FEATURE(dc, VIS1);
4409 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4410 break;
4411 case 0x06b: /* VIS I fnot1s */
4412 CHECK_FPU_FEATURE(dc, VIS1);
4413 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4414 break;
4415 case 0x06c: /* VIS I fxor */
4416 CHECK_FPU_FEATURE(dc, VIS1);
4417 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4418 break;
4419 case 0x06d: /* VIS I fxors */
4420 CHECK_FPU_FEATURE(dc, VIS1);
4421 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4422 break;
4423 case 0x06e: /* VIS I fnand */
4424 CHECK_FPU_FEATURE(dc, VIS1);
4425 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4426 break;
4427 case 0x06f: /* VIS I fnands */
4428 CHECK_FPU_FEATURE(dc, VIS1);
4429 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4430 break;
4431 case 0x070: /* VIS I fand */
4432 CHECK_FPU_FEATURE(dc, VIS1);
4433 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4434 break;
4435 case 0x071: /* VIS I fands */
4436 CHECK_FPU_FEATURE(dc, VIS1);
4437 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4438 break;
4439 case 0x072: /* VIS I fxnor */
4440 CHECK_FPU_FEATURE(dc, VIS1);
4441 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4442 break;
4443 case 0x073: /* VIS I fxnors */
4444 CHECK_FPU_FEATURE(dc, VIS1);
4445 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4446 break;
4447 case 0x074: /* VIS I fsrc1 */
4448 CHECK_FPU_FEATURE(dc, VIS1);
4449 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4450 gen_store_fpr_D(dc, rd, cpu_src1_64);
4451 break;
4452 case 0x075: /* VIS I fsrc1s */
4453 CHECK_FPU_FEATURE(dc, VIS1);
4454 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4455 gen_store_fpr_F(dc, rd, cpu_src1_32);
4456 break;
4457 case 0x076: /* VIS I fornot2 */
4458 CHECK_FPU_FEATURE(dc, VIS1);
4459 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4460 break;
4461 case 0x077: /* VIS I fornot2s */
4462 CHECK_FPU_FEATURE(dc, VIS1);
4463 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4464 break;
4465 case 0x078: /* VIS I fsrc2 */
4466 CHECK_FPU_FEATURE(dc, VIS1);
4467 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4468 gen_store_fpr_D(dc, rd, cpu_src1_64);
4469 break;
4470 case 0x079: /* VIS I fsrc2s */
4471 CHECK_FPU_FEATURE(dc, VIS1);
4472 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4473 gen_store_fpr_F(dc, rd, cpu_src1_32);
4474 break;
4475 case 0x07a: /* VIS I fornot1 */
4476 CHECK_FPU_FEATURE(dc, VIS1);
4477 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4478 break;
4479 case 0x07b: /* VIS I fornot1s */
4480 CHECK_FPU_FEATURE(dc, VIS1);
4481 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4482 break;
4483 case 0x07c: /* VIS I for */
4484 CHECK_FPU_FEATURE(dc, VIS1);
4485 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4486 break;
4487 case 0x07d: /* VIS I fors */
4488 CHECK_FPU_FEATURE(dc, VIS1);
4489 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4490 break;
4491 case 0x07e: /* VIS I fone */
4492 CHECK_FPU_FEATURE(dc, VIS1);
4493 cpu_dst_64 = gen_dest_fpr_D();
4494 tcg_gen_movi_i64(cpu_dst_64, -1);
4495 gen_store_fpr_D(dc, rd, cpu_dst_64);
4496 break;
4497 case 0x07f: /* VIS I fones */
4498 CHECK_FPU_FEATURE(dc, VIS1);
4499 cpu_dst_32 = gen_dest_fpr_F();
4500 tcg_gen_movi_i32(cpu_dst_32, -1);
4501 gen_store_fpr_F(dc, rd, cpu_dst_32);
4502 break;
4503 case 0x080: /* VIS I shutdown */
4504 case 0x081: /* VIS II siam */
4505 // XXX: not implemented; treated as an illegal instruction
4506 goto illegal_insn;
4507 default:
4508 goto illegal_insn;
4509 }
4510 #else
4511 goto ncp_insn;
4512 #endif
4513 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4514 #ifdef TARGET_SPARC64
4515 goto illegal_insn;
4516 #else
4517 goto ncp_insn;
4518 #endif
4519 #ifdef TARGET_SPARC64
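/* V9 "return" is essentially a jmpl combined with a register-window
   restore: the target is computed from rs1 plus an immediate or rs2,
   the window is popped with gen_helper_restore(), alignment is
   checked, and npc is loaded with the target so that the delay slot
   still executes. */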
4520 } else if (xop == 0x39) { /* V9 return */
4521 TCGv_i32 r_const;
4523 save_state(dc);
4524 cpu_src1 = get_src1(insn, cpu_src1);
4525 if (IS_IMM) { /* immediate */
4526 simm = GET_FIELDs(insn, 19, 31);
4527 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4528 } else { /* register */
4529 rs2 = GET_FIELD(insn, 27, 31);
4530 if (rs2) {
4531 gen_movl_reg_TN(rs2, cpu_src2);
4532 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4533 } else
4534 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4535 }
4536 gen_helper_restore(cpu_env);
4537 gen_mov_pc_npc(dc);
4538 r_const = tcg_const_i32(3);
4539 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4540 tcg_temp_free_i32(r_const);
4541 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4542 dc->npc = DYNAMIC_PC;
4543 goto jmp_insn;
4544 #endif
4545 } else {
4546 cpu_src1 = get_src1(insn, cpu_src1);
4547 if (IS_IMM) { /* immediate */
4548 simm = GET_FIELDs(insn, 19, 31);
4549 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4550 } else { /* register */
4551 rs2 = GET_FIELD(insn, 27, 31);
4552 if (rs2) {
4553 gen_movl_reg_TN(rs2, cpu_src2);
4554 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4555 } else
4556 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4557 }
4558 switch (xop) {
4559 case 0x38: /* jmpl */
4560 {
4561 TCGv r_pc;
4562 TCGv_i32 r_const;
4564 r_pc = tcg_const_tl(dc->pc);
4565 gen_movl_TN_reg(rd, r_pc);
4566 tcg_temp_free(r_pc);
4567 gen_mov_pc_npc(dc);
4568 r_const = tcg_const_i32(3);
4569 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4570 tcg_temp_free_i32(r_const);
4571 gen_address_mask(dc, cpu_dst);
4572 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4573 dc->npc = DYNAMIC_PC;
4574 }
4575 goto jmp_insn;
4576 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4577 case 0x39: /* rett, V9 return */
4578 {
4579 TCGv_i32 r_const;
4581 if (!supervisor(dc))
4582 goto priv_insn;
4583 gen_mov_pc_npc(dc);
4584 r_const = tcg_const_i32(3);
4585 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4586 tcg_temp_free_i32(r_const);
4587 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4588 dc->npc = DYNAMIC_PC;
4589 gen_helper_rett(cpu_env);
4590 }
4591 goto jmp_insn;
4592 #endif
4593 case 0x3b: /* flush */
4594 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4595 goto unimp_flush;
4596 /* nop */
4597 break;
4598 case 0x3c: /* save */
4599 save_state(dc);
4600 gen_helper_save(cpu_env);
4601 gen_movl_TN_reg(rd, cpu_dst);
4602 break;
4603 case 0x3d: /* restore */
4604 save_state(dc);
4605 gen_helper_restore(cpu_env);
4606 gen_movl_TN_reg(rd, cpu_dst);
4607 break;
4608 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4609 case 0x3e: /* V9 done/retry */
4610 {
4611 switch (rd) {
4612 case 0:
4613 if (!supervisor(dc))
4614 goto priv_insn;
4615 dc->npc = DYNAMIC_PC;
4616 dc->pc = DYNAMIC_PC;
4617 gen_helper_done(cpu_env);
4618 goto jmp_insn;
4619 case 1:
4620 if (!supervisor(dc))
4621 goto priv_insn;
4622 dc->npc = DYNAMIC_PC;
4623 dc->pc = DYNAMIC_PC;
4624 gen_helper_retry(cpu_env);
4625 goto jmp_insn;
4626 default:
4627 goto illegal_insn;
4628 }
4629 }
4630 break;
4631 #endif
4632 default:
4633 goto illegal_insn;
4634 }
4635 }
4636 break;
4637 }
4638 break;
4639 case 3: /* load/store instructions */
4640 {
4641 unsigned int xop = GET_FIELD(insn, 7, 12);
4643 /* flush pending conditional evaluations before exposing
4644 cpu state */
4645 if (dc->cc_op != CC_OP_FLAGS) {
4646 dc->cc_op = CC_OP_FLAGS;
4647 gen_helper_compute_psr(cpu_env);
4648 }
4649 cpu_src1 = get_src1(insn, cpu_src1);
4650 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4651 rs2 = GET_FIELD(insn, 27, 31);
4652 gen_movl_reg_TN(rs2, cpu_src2);
4653 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4654 } else if (IS_IMM) { /* immediate */
4655 simm = GET_FIELDs(insn, 19, 31);
4656 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4657 } else { /* register */
4658 rs2 = GET_FIELD(insn, 27, 31);
4659 if (rs2 != 0) {
4660 gen_movl_reg_TN(rs2, cpu_src2);
4661 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4662 } else
4663 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4664 }
4665 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4666 (xop > 0x17 && xop <= 0x1d ) ||
4667 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
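/* xop here is insn bits 24:19 (GET_FIELD uses the manual's reversed
   bit order); the range test above routes every integer load,
   including the alternate-space (ASI) forms, into this switch, while
   the FP loads and the store groups are handled by the branches
   further below. */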
4668 switch (xop) {
4669 case 0x0: /* ld, V9 lduw, load unsigned word */
4670 gen_address_mask(dc, cpu_addr);
4671 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4672 break;
4673 case 0x1: /* ldub, load unsigned byte */
4674 gen_address_mask(dc, cpu_addr);
4675 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4676 break;
4677 case 0x2: /* lduh, load unsigned halfword */
4678 gen_address_mask(dc, cpu_addr);
4679 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4680 break;
4681 case 0x3: /* ldd, load double word */
4682 if (rd & 1)
4683 goto illegal_insn;
4684 else {
4685 TCGv_i32 r_const;
4687 save_state(dc);
4688 r_const = tcg_const_i32(7);
4689 /* XXX remove alignment check */
4690 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4691 tcg_temp_free_i32(r_const);
4692 gen_address_mask(dc, cpu_addr);
4693 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4694 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4695 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4696 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4697 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4698 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4699 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4700 }
4701 break;
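/* Note the SPARC ldd register-pair convention above: the odd register
   rd + 1 receives the low word directly, while the high word is left
   in cpu_val so that the common writeback at the end of this switch
   stores it into the even register rd. */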
4702 case 0x9: /* ldsb, load signed byte */
4703 gen_address_mask(dc, cpu_addr);
4704 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4705 break;
4706 case 0xa: /* ldsh, load signed halfword */
4707 gen_address_mask(dc, cpu_addr);
4708 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4709 break;
4710 case 0xd: /* ldstub -- XXX: should be atomic */
4711 {
4712 TCGv r_const;
4714 gen_address_mask(dc, cpu_addr);
4715 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4716 r_const = tcg_const_tl(0xff);
4717 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4718 tcg_temp_free(r_const);
4719 }
4720 break;
4721 case 0x0f: /* swap, swap register with memory. Also
4722 atomic */
4723 CHECK_IU_FEATURE(dc, SWAP);
4724 gen_movl_reg_TN(rd, cpu_val);
4725 gen_address_mask(dc, cpu_addr);
4726 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4727 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4728 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4729 break;
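/* As the XXX notes above say, ldstub and swap are emulated as a
   separate load and store rather than as one atomic read-modify-write.
   In practice this is harmless while translated code runs one CPU at a
   time, but it is not truly atomic. */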
4730 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4731 case 0x10: /* lda, V9 lduwa, load word alternate */
4732 #ifndef TARGET_SPARC64
4733 if (IS_IMM)
4734 goto illegal_insn;
4735 if (!supervisor(dc))
4736 goto priv_insn;
4737 #endif
4738 save_state(dc);
4739 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4740 break;
4741 case 0x11: /* lduba, load unsigned byte alternate */
4742 #ifndef TARGET_SPARC64
4743 if (IS_IMM)
4744 goto illegal_insn;
4745 if (!supervisor(dc))
4746 goto priv_insn;
4747 #endif
4748 save_state(dc);
4749 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4750 break;
4751 case 0x12: /* lduha, load unsigned halfword alternate */
4752 #ifndef TARGET_SPARC64
4753 if (IS_IMM)
4754 goto illegal_insn;
4755 if (!supervisor(dc))
4756 goto priv_insn;
4757 #endif
4758 save_state(dc);
4759 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4760 break;
4761 case 0x13: /* ldda, load double word alternate */
4762 #ifndef TARGET_SPARC64
4763 if (IS_IMM)
4764 goto illegal_insn;
4765 if (!supervisor(dc))
4766 goto priv_insn;
4767 #endif
4768 if (rd & 1)
4769 goto illegal_insn;
4770 save_state(dc);
4771 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4772 goto skip_move;
4773 case 0x19: /* ldsba, load signed byte alternate */
4774 #ifndef TARGET_SPARC64
4775 if (IS_IMM)
4776 goto illegal_insn;
4777 if (!supervisor(dc))
4778 goto priv_insn;
4779 #endif
4780 save_state(dc);
4781 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4782 break;
4783 case 0x1a: /* ldsha, load signed halfword alternate */
4784 #ifndef TARGET_SPARC64
4785 if (IS_IMM)
4786 goto illegal_insn;
4787 if (!supervisor(dc))
4788 goto priv_insn;
4789 #endif
4790 save_state(dc);
4791 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4792 break;
4793 case 0x1d: /* ldstuba -- XXX: should be atomic */
4794 #ifndef TARGET_SPARC64
4795 if (IS_IMM)
4796 goto illegal_insn;
4797 if (!supervisor(dc))
4798 goto priv_insn;
4799 #endif
4800 save_state(dc);
4801 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4802 break;
4803 case 0x1f: /* swapa, swap reg with alt. memory. Also
4804 atomic */
4805 CHECK_IU_FEATURE(dc, SWAP);
4806 #ifndef TARGET_SPARC64
4807 if (IS_IMM)
4808 goto illegal_insn;
4809 if (!supervisor(dc))
4810 goto priv_insn;
4811 #endif
4812 save_state(dc);
4813 gen_movl_reg_TN(rd, cpu_val);
4814 gen_swap_asi(cpu_val, cpu_addr, insn);
4815 break;
4817 #ifndef TARGET_SPARC64
4818 case 0x30: /* ldc */
4819 case 0x31: /* ldcsr */
4820 case 0x33: /* lddc */
4821 goto ncp_insn;
4822 #endif
4823 #endif
4824 #ifdef TARGET_SPARC64
4825 case 0x08: /* V9 ldsw */
4826 gen_address_mask(dc, cpu_addr);
4827 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4828 break;
4829 case 0x0b: /* V9 ldx */
4830 gen_address_mask(dc, cpu_addr);
4831 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4832 break;
4833 case 0x18: /* V9 ldswa */
4834 save_state(dc);
4835 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4836 break;
4837 case 0x1b: /* V9 ldxa */
4838 save_state(dc);
4839 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4840 break;
4841 case 0x2d: /* V9 prefetch, no effect */
4842 goto skip_move;
4843 case 0x30: /* V9 ldfa */
4844 if (gen_trap_ifnofpu(dc)) {
4845 goto jmp_insn;
4846 }
4847 save_state(dc);
4848 gen_ldf_asi(cpu_addr, insn, 4, rd);
4849 gen_update_fprs_dirty(rd);
4850 goto skip_move;
4851 case 0x33: /* V9 lddfa */
4852 if (gen_trap_ifnofpu(dc)) {
4853 goto jmp_insn;
4854 }
4855 save_state(dc);
4856 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4857 gen_update_fprs_dirty(DFPREG(rd));
4858 goto skip_move;
4859 case 0x3d: /* V9 prefetcha, no effect */
4860 goto skip_move;
4861 case 0x32: /* V9 ldqfa */
4862 CHECK_FPU_FEATURE(dc, FLOAT128);
4863 if (gen_trap_ifnofpu(dc)) {
4864 goto jmp_insn;
4865 }
4866 save_state(dc);
4867 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4868 gen_update_fprs_dirty(QFPREG(rd));
4869 goto skip_move;
4870 #endif
4871 default:
4872 goto illegal_insn;
4873 }
4874 gen_movl_TN_reg(rd, cpu_val);
4875 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4876 skip_move: ;
4877 #endif
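/* skip_move bypasses the common integer writeback above for loads
   that have already written their destination (ldda, the ASI/FP
   loads, and the prefetches). The next group, xop 0x20-0x23, are the
   FP loads; each first traps via gen_trap_ifnofpu() if the FPU is
   disabled. */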
4878 } else if (xop >= 0x20 && xop < 0x24) {
4879 if (gen_trap_ifnofpu(dc)) {
4880 goto jmp_insn;
4881 }
4882 save_state(dc);
4883 switch (xop) {
4884 case 0x20: /* ldf, load fpreg */
4885 gen_address_mask(dc, cpu_addr);
4886 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4887 cpu_dst_32 = gen_dest_fpr_F();
4888 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4889 gen_store_fpr_F(dc, rd, cpu_dst_32);
4890 break;
4891 case 0x21: /* ldfsr, V9 ldxfsr */
4892 #ifdef TARGET_SPARC64
4893 gen_address_mask(dc, cpu_addr);
4894 if (rd == 1) {
4895 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4896 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4897 } else {
4898 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4899 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4900 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4901 }
4902 #else
4903 {
4904 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4905 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4906 }
4907 #endif
4908 break;
4909 case 0x22: /* ldqf, load quad fpreg */
4910 {
4911 TCGv_i32 r_const;
4913 CHECK_FPU_FEATURE(dc, FLOAT128);
4914 r_const = tcg_const_i32(dc->mem_idx);
4915 gen_address_mask(dc, cpu_addr);
4916 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4917 tcg_temp_free_i32(r_const);
4918 gen_op_store_QT0_fpr(QFPREG(rd));
4919 gen_update_fprs_dirty(QFPREG(rd));
4920 }
4921 break;
4922 case 0x23: /* lddf, load double fpreg */
4923 gen_address_mask(dc, cpu_addr);
4924 cpu_dst_64 = gen_dest_fpr_D();
4925 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4926 gen_store_fpr_D(dc, rd, cpu_dst_64);
4927 break;
4928 default:
4929 goto illegal_insn;
4930 }
4931 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4932 xop == 0xe || xop == 0x1e) {
4933 gen_movl_reg_TN(rd, cpu_val);
4934 switch (xop) {
4935 case 0x4: /* st, store word */
4936 gen_address_mask(dc, cpu_addr);
4937 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4938 break;
4939 case 0x5: /* stb, store byte */
4940 gen_address_mask(dc, cpu_addr);
4941 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4942 break;
4943 case 0x6: /* sth, store halfword */
4944 gen_address_mask(dc, cpu_addr);
4945 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4946 break;
4947 case 0x7: /* std, store double word */
4948 if (rd & 1)
4949 goto illegal_insn;
4950 else {
4951 TCGv_i32 r_const;
4953 save_state(dc);
4954 gen_address_mask(dc, cpu_addr);
4955 r_const = tcg_const_i32(7);
4956 /* XXX remove alignment check */
4957 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4958 tcg_temp_free_i32(r_const);
4959 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4960 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4961 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4962 }
4963 break;
4964 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4965 case 0x14: /* sta, V9 stwa, store word alternate */
4966 #ifndef TARGET_SPARC64
4967 if (IS_IMM)
4968 goto illegal_insn;
4969 if (!supervisor(dc))
4970 goto priv_insn;
4971 #endif
4972 save_state(dc);
4973 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4974 dc->npc = DYNAMIC_PC;
4975 break;
4976 case 0x15: /* stba, store byte alternate */
4977 #ifndef TARGET_SPARC64
4978 if (IS_IMM)
4979 goto illegal_insn;
4980 if (!supervisor(dc))
4981 goto priv_insn;
4982 #endif
4983 save_state(dc);
4984 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4985 dc->npc = DYNAMIC_PC;
4986 break;
4987 case 0x16: /* stha, store halfword alternate */
4988 #ifndef TARGET_SPARC64
4989 if (IS_IMM)
4990 goto illegal_insn;
4991 if (!supervisor(dc))
4992 goto priv_insn;
4993 #endif
4994 save_state(dc);
4995 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4996 dc->npc = DYNAMIC_PC;
4997 break;
4998 case 0x17: /* stda, store double word alternate */
4999 #ifndef TARGET_SPARC64
5000 if (IS_IMM)
5001 goto illegal_insn;
5002 if (!supervisor(dc))
5003 goto priv_insn;
5004 #endif
5005 if (rd & 1)
5006 goto illegal_insn;
5007 else {
5008 save_state(dc);
5009 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
5010 }
5011 break;
5012 #endif
5013 #ifdef TARGET_SPARC64
5014 case 0x0e: /* V9 stx */
5015 gen_address_mask(dc, cpu_addr);
5016 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5017 break;
5018 case 0x1e: /* V9 stxa */
5019 save_state(dc);
5020 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5021 dc->npc = DYNAMIC_PC;
5022 break;
5023 #endif
5024 default:
5025 goto illegal_insn;
5026 }
5027 } else if (xop > 0x23 && xop < 0x28) {
5028 if (gen_trap_ifnofpu(dc)) {
5029 goto jmp_insn;
5030 }
5031 save_state(dc);
5032 switch (xop) {
5033 case 0x24: /* stf, store fpreg */
5034 gen_address_mask(dc, cpu_addr);
5035 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5036 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5037 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5038 break;
5039 case 0x25: /* stfsr, V9 stxfsr */
5040 #ifdef TARGET_SPARC64
5041 gen_address_mask(dc, cpu_addr);
5042 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5043 if (rd == 1)
5044 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5045 else
5046 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5047 #else
5048 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5049 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5050 #endif
5051 break;
5052 case 0x26:
5053 #ifdef TARGET_SPARC64
5054 /* V9 stqf, store quad fpreg */
5055 {
5056 TCGv_i32 r_const;
5058 CHECK_FPU_FEATURE(dc, FLOAT128);
5059 gen_op_load_fpr_QT0(QFPREG(rd));
5060 r_const = tcg_const_i32(dc->mem_idx);
5061 gen_address_mask(dc, cpu_addr);
5062 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5063 tcg_temp_free_i32(r_const);
5064 }
5065 break;
5066 #else /* !TARGET_SPARC64 */
5067 /* stdfq, store floating point queue */
5068 #if defined(CONFIG_USER_ONLY)
5069 goto illegal_insn;
5070 #else
5071 if (!supervisor(dc))
5072 goto priv_insn;
5073 if (gen_trap_ifnofpu(dc)) {
5074 goto jmp_insn;
5075 }
5076 goto nfq_insn;
5077 #endif
5078 #endif
5079 case 0x27: /* stdf, store double fpreg */
5080 gen_address_mask(dc, cpu_addr);
5081 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5082 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5083 break;
5084 default:
5085 goto illegal_insn;
5086 }
5087 } else if (xop > 0x33 && xop < 0x3f) {
5088 save_state(dc);
5089 switch (xop) {
5090 #ifdef TARGET_SPARC64
5091 case 0x34: /* V9 stfa */
5092 if (gen_trap_ifnofpu(dc)) {
5093 goto jmp_insn;
5094 }
5095 gen_stf_asi(cpu_addr, insn, 4, rd);
5096 break;
5097 case 0x36: /* V9 stqfa */
5098 {
5099 TCGv_i32 r_const;
5101 CHECK_FPU_FEATURE(dc, FLOAT128);
5102 if (gen_trap_ifnofpu(dc)) {
5103 goto jmp_insn;
5104 }
5105 r_const = tcg_const_i32(7);
5106 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5107 tcg_temp_free_i32(r_const);
5108 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5109 }
5110 break;
5111 case 0x37: /* V9 stdfa */
5112 if (gen_trap_ifnofpu(dc)) {
5113 goto jmp_insn;
5114 }
5115 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5116 break;
5117 case 0x3c: /* V9 casa */
5118 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5119 gen_movl_TN_reg(rd, cpu_val);
5120 break;
5121 case 0x3e: /* V9 casxa */
5122 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5123 gen_movl_TN_reg(rd, cpu_val);
5124 break;
5125 #else
5126 case 0x34: /* stc */
5127 case 0x35: /* stcsr */
5128 case 0x36: /* stdcq */
5129 case 0x37: /* stdc */
5130 goto ncp_insn;
5131 #endif
5132 default:
5133 goto illegal_insn;
5134 }
5135 } else
5136 goto illegal_insn;
5137 }
5138 break;
5139 }
5140 /* default case for non-jump instructions */
5141 if (dc->npc == DYNAMIC_PC) {
5142 dc->pc = DYNAMIC_PC;
5143 gen_op_next_insn();
5144 } else if (dc->npc == JUMP_PC) {
5145 /* we can do a static jump */
5146 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5147 dc->is_br = 1;
5148 } else {
5149 dc->pc = dc->npc;
5150 dc->npc = dc->npc + 4;
5151 }
5152 jmp_insn:
5153 goto egress;
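/* Each exception label below follows the same pattern: sync pc/npc
   into the CPU state with save_state(), raise the trap through a
   helper, and set is_br so that translation of this block stops. */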
5154 illegal_insn:
5155 {
5156 TCGv_i32 r_const;
5158 save_state(dc);
5159 r_const = tcg_const_i32(TT_ILL_INSN);
5160 gen_helper_raise_exception(cpu_env, r_const);
5161 tcg_temp_free_i32(r_const);
5162 dc->is_br = 1;
5163 }
5164 goto egress;
5165 unimp_flush:
5166 {
5167 TCGv_i32 r_const;
5169 save_state(dc);
5170 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5171 gen_helper_raise_exception(cpu_env, r_const);
5172 tcg_temp_free_i32(r_const);
5173 dc->is_br = 1;
5174 }
5175 goto egress;
5176 #if !defined(CONFIG_USER_ONLY)
5177 priv_insn:
5178 {
5179 TCGv_i32 r_const;
5181 save_state(dc);
5182 r_const = tcg_const_i32(TT_PRIV_INSN);
5183 gen_helper_raise_exception(cpu_env, r_const);
5184 tcg_temp_free_i32(r_const);
5185 dc->is_br = 1;
5186 }
5187 goto egress;
5188 #endif
5189 nfpu_insn:
5190 save_state(dc);
5191 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5192 dc->is_br = 1;
5193 goto egress;
5194 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5195 nfq_insn:
5196 save_state(dc);
5197 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5198 dc->is_br = 1;
5199 goto egress;
5200 #endif
5201 #ifndef TARGET_SPARC64
5202 ncp_insn:
5203 {
5204 TCGv_i32 r_const;
5206 save_state(dc);
5207 r_const = tcg_const_i32(TT_NCP_INSN);
5208 gen_helper_raise_exception(cpu_env, r_const);
5209 tcg_temp_free_i32(r_const);
5210 dc->is_br = 1;
5211 }
5212 goto egress;
5213 #endif
5214 egress:
5215 tcg_temp_free(cpu_tmp1);
5216 tcg_temp_free(cpu_tmp2);
5217 if (dc->n_t32 != 0) {
5218 int i;
5219 for (i = dc->n_t32 - 1; i >= 0; --i) {
5220 tcg_temp_free_i32(dc->t32[i]);
5221 }
5222 dc->n_t32 = 0;
5223 }
5224 }
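/* Translate one block starting at tb->pc: the loop below decodes an
   instruction at a time and stops at a branch (is_br), at a page
   boundary, when the opcode buffer fills, or after one instruction in
   single-step mode. When spc is set it also records pc/npc for every
   opcode so a guest state can be recovered from a host PC (see
   restore_state_to_opc below). */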
5226 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5227 int spc, CPUSPARCState *env)
5228 {
5229 target_ulong pc_start, last_pc;
5230 uint16_t *gen_opc_end;
5231 DisasContext dc1, *dc = &dc1;
5232 CPUBreakpoint *bp;
5233 int j, lj = -1;
5234 int num_insns;
5235 int max_insns;
5236 unsigned int insn;
5238 memset(dc, 0, sizeof(DisasContext));
5239 dc->tb = tb;
5240 pc_start = tb->pc;
5241 dc->pc = pc_start;
5242 last_pc = dc->pc;
5243 dc->npc = (target_ulong) tb->cs_base;
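/* On SPARC the next-PC (delay-slot) state must survive across TB
   boundaries, so it travels in tb->cs_base alongside tb->pc. */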
5244 dc->cc_op = CC_OP_DYNAMIC;
5245 dc->mem_idx = cpu_mmu_index(env);
5246 dc->def = env->def;
5247 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5248 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5249 dc->singlestep = (env->singlestep_enabled || singlestep);
5250 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5252 num_insns = 0;
5253 max_insns = tb->cflags & CF_COUNT_MASK;
5254 if (max_insns == 0)
5255 max_insns = CF_COUNT_MASK;
5256 gen_icount_start();
5257 do {
5258 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5259 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5260 if (bp->pc == dc->pc) {
5261 if (dc->pc != pc_start)
5262 save_state(dc);
5263 gen_helper_debug(cpu_env);
5264 tcg_gen_exit_tb(0);
5265 dc->is_br = 1;
5266 goto exit_gen_loop;
5267 }
5268 }
5269 }
5270 if (spc) {
5271 qemu_log("Search PC...\n");
5272 j = gen_opc_ptr - gen_opc_buf;
5273 if (lj < j) {
5274 lj++;
5275 while (lj < j)
5276 gen_opc_instr_start[lj++] = 0;
5277 gen_opc_pc[lj] = dc->pc;
5278 gen_opc_npc[lj] = dc->npc;
5279 gen_opc_instr_start[lj] = 1;
5280 gen_opc_icount[lj] = num_insns;
5281 }
5282 }
5283 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5284 gen_io_start();
5285 last_pc = dc->pc;
5286 insn = cpu_ldl_code(env, dc->pc);
5288 cpu_tmp0 = tcg_temp_new();
5289 cpu_tmp32 = tcg_temp_new_i32();
5290 cpu_tmp64 = tcg_temp_new_i64();
5291 cpu_dst = tcg_temp_new();
5292 cpu_val = tcg_temp_new();
5293 cpu_addr = tcg_temp_new();
5295 disas_sparc_insn(dc, insn);
5296 num_insns++;
5298 tcg_temp_free(cpu_addr);
5299 tcg_temp_free(cpu_val);
5300 tcg_temp_free(cpu_dst);
5301 tcg_temp_free_i64(cpu_tmp64);
5302 tcg_temp_free_i32(cpu_tmp32);
5303 tcg_temp_free(cpu_tmp0);
5305 if (dc->is_br)
5306 break;
5307 /* if the next PC is different, we abort now */
5308 if (dc->pc != (last_pc + 4))
5309 break;
5310 /* if we reach a page boundary, we stop generation so that the
5311 PC of a TT_TFAULT exception is always in the right page */
5312 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5313 break;
5314 /* in single-step mode, we generate only one instruction and
5315 generate an exception */
5316 if (dc->singlestep) {
5317 break;
5318 }
5319 } while ((gen_opc_ptr < gen_opc_end) &&
5320 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5321 num_insns < max_insns);
5323 exit_gen_loop:
5324 if (tb->cflags & CF_LAST_IO) {
5325 gen_io_end();
5326 }
5327 if (!dc->is_br) {
5328 if (dc->pc != DYNAMIC_PC &&
5329 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5330 /* static PC and NPC: we can use direct chaining */
5331 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5332 } else {
5333 if (dc->pc != DYNAMIC_PC) {
5334 tcg_gen_movi_tl(cpu_pc, dc->pc);
5335 }
5336 save_npc(dc);
5337 tcg_gen_exit_tb(0);
5338 }
5339 }
5340 gen_icount_end(tb, num_insns);
5341 *gen_opc_ptr = INDEX_op_end;
5342 if (spc) {
5343 j = gen_opc_ptr - gen_opc_buf;
5344 lj++;
5345 while (lj <= j)
5346 gen_opc_instr_start[lj++] = 0;
5347 #if 0
5348 log_page_dump();
5349 #endif
5350 gen_opc_jump_pc[0] = dc->jump_pc[0];
5351 gen_opc_jump_pc[1] = dc->jump_pc[1];
5352 } else {
5353 tb->size = last_pc + 4 - pc_start;
5354 tb->icount = num_insns;
5355 }
5356 #ifdef DEBUG_DISAS
5357 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5358 qemu_log("--------------\n");
5359 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5360 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5361 qemu_log("\n");
5362 }
5363 #endif
5364 }
5366 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5367 {
5368 gen_intermediate_code_internal(tb, 0, env);
5369 }
5371 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5372 {
5373 gen_intermediate_code_internal(tb, 1, env);
5374 }
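/* One-time TCG setup: bind the global TCGv handles declared at the
   top of this file to their slots in CPUSPARCState, so that generated
   code can refer to CPU registers by name. */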
5376 void gen_intermediate_code_init(CPUSPARCState *env)
5377 {
5378 unsigned int i;
5379 static int inited;
5380 static const char * const gregnames[8] = {
5381 NULL, // g0 not used
5382 "g1",
5383 "g2",
5384 "g3",
5385 "g4",
5386 "g5",
5387 "g6",
5388 "g7",
5390 static const char * const fregnames[32] = {
5391 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5392 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5393 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5394 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5397 /* init various static tables */
5398 if (!inited) {
5399 inited = 1;
5401 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5402 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5403 offsetof(CPUSPARCState, regwptr),
5404 "regwptr");
5405 #ifdef TARGET_SPARC64
5406 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5407 "xcc");
5408 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5409 "asi");
5410 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5411 "fprs");
5412 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5413 "gsr");
5414 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5415 offsetof(CPUSPARCState, tick_cmpr),
5416 "tick_cmpr");
5417 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5418 offsetof(CPUSPARCState, stick_cmpr),
5419 "stick_cmpr");
5420 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5421 offsetof(CPUSPARCState, hstick_cmpr),
5422 "hstick_cmpr");
5423 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5424 "hintp");
5425 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5426 "htba");
5427 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5428 "hver");
5429 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5430 offsetof(CPUSPARCState, ssr), "ssr");
5431 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5432 offsetof(CPUSPARCState, version), "ver");
5433 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5434 offsetof(CPUSPARCState, softint),
5435 "softint");
5436 #else
5437 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5438 "wim");
5439 #endif
5440 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5441 "cond");
5442 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5443 "cc_src");
5444 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5445 offsetof(CPUSPARCState, cc_src2),
5446 "cc_src2");
5447 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5448 "cc_dst");
5449 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5450 "cc_op");
5451 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5452 "psr");
5453 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5454 "fsr");
5455 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5456 "pc");
5457 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5458 "npc");
5459 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5460 #ifndef CONFIG_USER_ONLY
5461 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5462 "tbr");
5463 #endif
5464 for (i = 1; i < 8; i++) {
5465 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5466 offsetof(CPUSPARCState, gregs[i]),
5467 gregnames[i]);
5468 }
5469 for (i = 0; i < TARGET_DPREGS; i++) {
5470 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5471 offsetof(CPUSPARCState, fpr[i]),
5472 fregnames[i]);
5473 }
5475 /* register helpers */
5477 #define GEN_HELPER 2
5478 #include "helper.h"
5479 }
5480 }
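/* Called when an exception interrupts a TB mid-stream: restore pc and
   npc from the side tables filled in during the search-pc pass. The
   sentinel npc values are the DYNAMIC_PC and JUMP_PC constants defined
   at the top of this file. */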
5482 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5483 {
5484 target_ulong npc;
5485 env->pc = gen_opc_pc[pc_pos];
5486 npc = gen_opc_npc[pc_pos];
5487 if (npc == DYNAMIC_PC) {
5488 /* dynamic NPC: already stored */
5489 } else if (npc == JUMP_PC) {
5490 /* jump PC: use 'cond' and the jump targets of the translation */
5491 if (env->cond) {
5492 env->npc = gen_opc_jump_pc[0];
5493 } else {
5494 env->npc = gen_opc_jump_pc[1];
5495 }
5496 } else {
5497 env->npc = npc;
5498 }
5500 /* flush pending conditional evaluations before exposing cpu state */
5501 if (CC_OP != CC_OP_FLAGS) {
5502 helper_compute_psr(env);
5503 }
5504 }