/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
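
/*
 * Example: GET_FIELD numbers bits in IBM order, bit 0 being the MSB of
 * the 32-bit word.  For a SPARC instruction word,
 *     rd = GET_FIELD(insn, 2, 6)
 * expands to (insn >> (31 - 6)) & ((1 << 5) - 1), i.e. bits 29..25 of
 * insn -- the rd field -- while GET_FIELD_SP takes the manuals'
 * numbering, where bit 0 is the LSB.
 */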
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
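
/*
 * Example: SPARC V9 encodes the MSB of a double/quad FP register number
 * in bit 0 of the 5-bit register field, so DFPREG(1) == ((1 & 1) << 5) |
 * (1 & 0x1e) == 32, i.e. an encoded value of 1 names %f32.  32-bit SPARC
 * has only %f0..%f31, hence the plain even-alignment masks.
 */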
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
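
/*
 * Worked example: sign_extend(0x1fff, 13) shifts the 13-bit value to the
 * top of the word (0xfff80000) and arithmetically back down, yielding -1,
 * while sign_extend(0x0fff, 13) comes back unchanged as 0x0fff.
 */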
#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep)  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
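
/*
 * Example: the SPARC V8 PSR keeps the integer condition codes in bits
 * 23..20 (N=23, Z=22, V=21, C=20; the PSR_*_SHIFT constants come from
 * cpu.h), so gen_mov_reg_C turns a PSR value of 0x00100000 into 1 and
 * any value with bit 20 clear into 0.
 */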
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
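
/*
 * Example of the carry recovery above: after a 32-bit add, carry-out is 1
 * exactly when the truncated result wrapped around, i.e. dst < src
 * unsigned.  0xffffffff + 0x00000001 gives dst = 0x00000000, and since
 * 0x00000000 < 0xffffffff the setcond(LTU, dst, src) yields 1.
 */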
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
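
/*
 * One MULScc step, as generated above: %y holds the multiplier.  If its
 * low bit is 0 the addend is forced to 0; %y then shifts right with the
 * low bit of rs1 entering from the top, rs1 shifts right with (N ^ V)
 * entering from the top, and the conditional add is performed.  Issued
 * 32 times (plus a final fix-up), this is how SPARC V8 code without a
 * hardware multiplier builds a 32x32->64 multiply.
 */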
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
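
/*
 * The two brcond checks in gen_op_sdivx guard the one overflowing case
 * of signed 64-bit division: INT64_MIN / -1 is unrepresentable (its true
 * value is 2^63), so the result is pinned to INT64_MIN instead of
 * reaching tcg_gen_div_i64, which would also trap on many hosts.
 */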
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
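
/*
 * Example: with the encoding above (0 '=', 1 '<', 2 '>', 3 unordered),
 * gen_op_eval_fbl below computes FCC0 & !FCC1, true only for value 1,
 * i.e. "less than", and gen_op_eval_fbne computes FCC0 | FCC1, true for
 * anything but 0, i.e. "not equal".
 */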
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
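
/*
 * Example of the pc/npc pair: for a straight-line instruction at 0x1000
 * with npc 0x1004, gen_op_next_insn produces pc = 0x1004, npc = 0x1008;
 * a delayed control transfer instead leaves pc = npc and points npc at
 * the target, so the delay-slot instruction at the old npc still runs.
 */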
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }
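
    /*
     * The offsets above are relative to FSR_FCC0_SHIFT, which
     * gen_mov_reg_FCC0/FCC1 add back in: in the V9 FSR, fcc0 sits at
     * bits 11..10, fcc1 at 33..32, fcc2 at 35..34 and fcc3 at 37..36,
     * so for cc == 1 the offset 32 - 10 makes the effective shift
     * FSR_FCC0_SHIFT + offset == 32.
     */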
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
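
/*
 * The "inverted logic" note: the table stores the negation of each BPr
 * condition.  gen_cond_reg first sets r_dst to 0, then branches past the
 * "r_dst = 1" store when the inverted condition holds; e.g. BRZ (cond 1)
 * uses TCG_COND_NE, so a non-zero register skips the store and r_dst
 * stays 0.
 */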
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
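
/*
 * Annul-bit handling above, following SPARC semantics: "ba,a" skips the
 * delay slot entirely (pc goes straight to the target), while an
 * annulling conditional branch executes the delay slot only on the taken
 * path -- hence gen_branch_a resumes the not-taken path at npc + 4
 * rather than npc.
 */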
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}

#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
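
/*
 * Example of the pointer arithmetic above: with env->tl == 2, r_tl
 * becomes 2 * sizeof(trap_state) and r_tsptr ends up as &env->ts[2], so
 * a following tcg_gen_ld_tl(..., offsetof(trap_state, tpc)) reads the
 * TPC of the current trap level.
 */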
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1864 /* before an instruction, dc->pc must be static */
1865 static void disas_sparc_insn(DisasContext * dc)
1867 unsigned int insn, opc, rs1, rs2, rd;
1868 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1869 target_long simm;
1871 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1872 tcg_gen_debug_insn_start(dc->pc);
1873 insn = ldl_code(dc->pc);
1874 opc = GET_FIELD(insn, 0, 1);
1876 rd = GET_FIELD(insn, 2, 6);
1878 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1879 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1881 switch (opc) {
1882 case 0: /* branches/sethi */
1884 unsigned int xop = GET_FIELD(insn, 7, 9);
1885 int32_t target;
1886 switch (xop) {
1887 #ifdef TARGET_SPARC64
1888 case 0x1: /* V9 BPcc */
1890 int cc;
1892 target = GET_FIELD_SP(insn, 0, 18);
1893 target = sign_extend(target, 18);
1894 target <<= 2;
1895 cc = GET_FIELD_SP(insn, 20, 21);
1896 if (cc == 0)
1897 do_branch(dc, target, insn, 0, cpu_cond);
1898 else if (cc == 2)
1899 do_branch(dc, target, insn, 1, cpu_cond);
1900 else
1901 goto illegal_insn;
1902 goto jmp_insn;
1904 case 0x3: /* V9 BPr */
1906 target = GET_FIELD_SP(insn, 0, 13) |
1907 (GET_FIELD_SP(insn, 20, 21) << 14);
1908 target = sign_extend(target, 16);
1909 target <<= 2;
1910 cpu_src1 = get_src1(insn, cpu_src1);
1911 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1912 goto jmp_insn;
1914 case 0x5: /* V9 FBPcc */
1916 int cc = GET_FIELD_SP(insn, 20, 21);
1917 if (gen_trap_ifnofpu(dc, cpu_cond))
1918 goto jmp_insn;
1919 target = GET_FIELD_SP(insn, 0, 18);
1920 target = sign_extend(target, 19);
1921 target <<= 2;
1922 do_fbranch(dc, target, insn, cc, cpu_cond);
1923 goto jmp_insn;
1925 #else
1926 case 0x7: /* CBN+x */
1928 goto ncp_insn;
1930 #endif
1931 case 0x2: /* BN+x */
1933 target = GET_FIELD(insn, 10, 31);
1934 target = sign_extend(target, 22);
1935 target <<= 2;
1936 do_branch(dc, target, insn, 0, cpu_cond);
1937 goto jmp_insn;
1939 case 0x6: /* FBN+x */
1941 if (gen_trap_ifnofpu(dc, cpu_cond))
1942 goto jmp_insn;
1943 target = GET_FIELD(insn, 10, 31);
1944 target = sign_extend(target, 22);
1945 target <<= 2;
1946 do_fbranch(dc, target, insn, 0, cpu_cond);
1947 goto jmp_insn;
1949 case 0x4: /* SETHI */
1950 if (rd) { // nop
1951 uint32_t value = GET_FIELD(insn, 10, 31);
1952 TCGv r_const;
1954 r_const = tcg_const_tl(value << 10);
1955 gen_movl_TN_reg(rd, r_const);
1956 tcg_temp_free(r_const);
1958 break;
1959 case 0x0: /* UNIMPL */
1960 default:
1961 goto illegal_insn;
1963 break;
1965 break;
1966 case 1: /*CALL*/
1968 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1969 TCGv r_const;
1971 r_const = tcg_const_tl(dc->pc);
1972 gen_movl_TN_reg(15, r_const);
1973 tcg_temp_free(r_const);
1974 target += dc->pc;
1975 gen_mov_pc_npc(dc, cpu_cond);
1976 dc->npc = target;
1978 goto jmp_insn;
1979 case 2: /* FPU & Logical Operations */
1981 unsigned int xop = GET_FIELD(insn, 7, 12);
1982 if (xop == 0x3a) { /* generate trap */
1983 int cond;
1985 cpu_src1 = get_src1(insn, cpu_src1);
1986 if (IS_IMM) {
1987 rs2 = GET_FIELD(insn, 25, 31);
1988 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1989 } else {
1990 rs2 = GET_FIELD(insn, 27, 31);
1991 if (rs2 != 0) {
1992 gen_movl_reg_TN(rs2, cpu_src2);
1993 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1994 } else
1995 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1997 cond = GET_FIELD(insn, 3, 6);
1998 if (cond == 0x8) {
1999 save_state(dc, cpu_cond);
2000 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2001 supervisor(dc))
2002 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2003 else
2004 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2005 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2006 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2007 gen_helper_raise_exception(cpu_tmp32);
2008 } else if (cond != 0) {
2009 TCGv r_cond = tcg_temp_new();
2010 int l1;
2011 #ifdef TARGET_SPARC64
2012 /* V9 icc/xcc */
2013 int cc = GET_FIELD_SP(insn, 11, 12);
2015 save_state(dc, cpu_cond);
2016 if (cc == 0)
2017 gen_cond(r_cond, 0, cond, dc);
2018 else if (cc == 2)
2019 gen_cond(r_cond, 1, cond, dc);
2020 else
2021 goto illegal_insn;
2022 #else
2023 save_state(dc, cpu_cond);
2024 gen_cond(r_cond, 0, cond, dc);
2025 #endif
2026 l1 = gen_new_label();
2027 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2029 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2030 supervisor(dc))
2031 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2032 else
2033 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2034 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2035 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2036 gen_helper_raise_exception(cpu_tmp32);
2038 gen_set_label(l1);
2039 tcg_temp_free(r_cond);
2041 gen_op_next_insn();
2042 tcg_gen_exit_tb(0);
2043 dc->is_br = 1;
2044 goto jmp_insn;
2045 } else if (xop == 0x28) {
2046 rs1 = GET_FIELD(insn, 13, 17);
2047 switch(rs1) {
2048 case 0: /* rdy */
2049 #ifndef TARGET_SPARC64
2050 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2051 manual, rdy on the microSPARC
2052 II */
2053 case 0x0f: /* stbar in the SPARCv8 manual,
2054 rdy on the microSPARC II */
2055 case 0x10 ... 0x1f: /* implementation-dependent in the
2056 SPARCv8 manual, rdy on the
2057 microSPARC II */
2058 #endif
2059 gen_movl_TN_reg(rd, cpu_y);
2060 break;
2061 #ifdef TARGET_SPARC64
2062 case 0x2: /* V9 rdccr */
2063 gen_helper_compute_psr();
2064 gen_helper_rdccr(cpu_dst);
2065 gen_movl_TN_reg(rd, cpu_dst);
2066 break;
2067 case 0x3: /* V9 rdasi */
2068 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2069 gen_movl_TN_reg(rd, cpu_dst);
2070 break;
2071 case 0x4: /* V9 rdtick */
2073 TCGv_ptr r_tickptr;
2075 r_tickptr = tcg_temp_new_ptr();
2076 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2077 offsetof(CPUState, tick));
2078 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2079 tcg_temp_free_ptr(r_tickptr);
2080 gen_movl_TN_reg(rd, cpu_dst);
2082 break;
2083 case 0x5: /* V9 rdpc */
2085 TCGv r_const;
2087 r_const = tcg_const_tl(dc->pc);
2088 gen_movl_TN_reg(rd, r_const);
2089 tcg_temp_free(r_const);
2091 break;
2092 case 0x6: /* V9 rdfprs */
2093 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2094 gen_movl_TN_reg(rd, cpu_dst);
2095 break;
2096 case 0xf: /* V9 membar */
2097 break; /* no effect */
2098 case 0x13: /* Graphics Status */
2099 if (gen_trap_ifnofpu(dc, cpu_cond))
2100 goto jmp_insn;
2101 gen_movl_TN_reg(rd, cpu_gsr);
2102 break;
2103 case 0x16: /* Softint */
2104 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2105 gen_movl_TN_reg(rd, cpu_dst);
2106 break;
2107 case 0x17: /* Tick compare */
2108 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2109 break;
2110 case 0x18: /* System tick */
2112 TCGv_ptr r_tickptr;
2114 r_tickptr = tcg_temp_new_ptr();
2115 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2116 offsetof(CPUState, stick));
2117 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2118 tcg_temp_free_ptr(r_tickptr);
2119 gen_movl_TN_reg(rd, cpu_dst);
2121 break;
2122 case 0x19: /* System tick compare */
2123 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2124 break;
2125 case 0x10: /* Performance Control */
2126 case 0x11: /* Performance Instrumentation Counter */
2127 case 0x12: /* Dispatch Control */
2128 case 0x14: /* Softint set, WO */
2129 case 0x15: /* Softint clear, WO */
2130 #endif
2131 default:
2132 goto illegal_insn;
2134 #if !defined(CONFIG_USER_ONLY)
2135 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2136 #ifndef TARGET_SPARC64
2137 if (!supervisor(dc))
2138 goto priv_insn;
2139 gen_helper_compute_psr();
2140 dc->cc_op = CC_OP_FLAGS;
2141 gen_helper_rdpsr(cpu_dst);
2142 #else
2143 CHECK_IU_FEATURE(dc, HYPV);
2144 if (!hypervisor(dc))
2145 goto priv_insn;
2146 rs1 = GET_FIELD(insn, 13, 17);
2147 switch (rs1) {
2148 case 0: // hpstate
2149 // gen_op_rdhpstate();
2150 break;
2151 case 1: // htstate
2152 // gen_op_rdhtstate();
2153 break;
2154 case 3: // hintp
2155 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2156 break;
2157 case 5: // htba
2158 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2159 break;
2160 case 6: // hver
2161 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2162 break;
2163 case 31: // hstick_cmpr
2164 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2165 break;
2166 default:
2167 goto illegal_insn;
2169 #endif
2170 gen_movl_TN_reg(rd, cpu_dst);
2171 break;
2172 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2173 if (!supervisor(dc))
2174 goto priv_insn;
2175 #ifdef TARGET_SPARC64
2176 rs1 = GET_FIELD(insn, 13, 17);
2177 switch (rs1) {
2178 case 0: // tpc
2180 TCGv_ptr r_tsptr;
2182 r_tsptr = tcg_temp_new_ptr();
2183 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2184 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2185 offsetof(trap_state, tpc));
2186 tcg_temp_free_ptr(r_tsptr);
2188 break;
2189 case 1: // tnpc
2191 TCGv_ptr r_tsptr;
2193 r_tsptr = tcg_temp_new_ptr();
2194 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2195 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2196 offsetof(trap_state, tnpc));
2197 tcg_temp_free_ptr(r_tsptr);
2199 break;
2200 case 2: // tstate
2202 TCGv_ptr r_tsptr;
2204 r_tsptr = tcg_temp_new_ptr();
2205 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2206 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2207 offsetof(trap_state, tstate));
2208 tcg_temp_free_ptr(r_tsptr);
2210 break;
2211 case 3: // tt
2213 TCGv_ptr r_tsptr;
2215 r_tsptr = tcg_temp_new_ptr();
2216 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2217 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2218 offsetof(trap_state, tt));
2219 tcg_temp_free_ptr(r_tsptr);
2220 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2222 break;
2223 case 4: // tick
2225 TCGv_ptr r_tickptr;
2227 r_tickptr = tcg_temp_new_ptr();
2228 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2229 offsetof(CPUState, tick));
2230 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2231 gen_movl_TN_reg(rd, cpu_tmp0);
2232 tcg_temp_free_ptr(r_tickptr);
2234 break;
2235 case 5: // tba
2236 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2237 break;
2238 case 6: // pstate
2239 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2240 offsetof(CPUSPARCState, pstate));
2241 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2242 break;
2243 case 7: // tl
2244 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2245 offsetof(CPUSPARCState, tl));
2246 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2247 break;
2248 case 8: // pil
2249 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2250 offsetof(CPUSPARCState, psrpil));
2251 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2252 break;
2253 case 9: // cwp
2254 gen_helper_rdcwp(cpu_tmp0);
2255 break;
2256 case 10: // cansave
2257 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2258 offsetof(CPUSPARCState, cansave));
2259 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2260 break;
2261 case 11: // canrestore
2262 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2263 offsetof(CPUSPARCState, canrestore));
2264 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2265 break;
2266 case 12: // cleanwin
2267 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2268 offsetof(CPUSPARCState, cleanwin));
2269 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2270 break;
2271 case 13: // otherwin
2272 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2273 offsetof(CPUSPARCState, otherwin));
2274 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2275 break;
2276 case 14: // wstate
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, wstate));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2280 break;
2281 case 16: // UA2005 gl
2282 CHECK_IU_FEATURE(dc, GL);
2283 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2284 offsetof(CPUSPARCState, gl));
2285 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2286 break;
2287 case 26: // UA2005 strand status
2288 CHECK_IU_FEATURE(dc, HYPV);
2289 if (!hypervisor(dc))
2290 goto priv_insn;
2291 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2292 break;
2293 case 31: // ver
2294 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2295 break;
2296 case 15: // fq
2297 default:
2298 goto illegal_insn;
2299 }
2300 #else
2301 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2302 #endif
2303 gen_movl_TN_reg(rd, cpu_tmp0);
2304 break;
2305 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2306 #ifdef TARGET_SPARC64
2307 save_state(dc, cpu_cond);
2308 gen_helper_flushw();
2309 #else
2310 if (!supervisor(dc))
2311 goto priv_insn;
2312 gen_movl_TN_reg(rd, cpu_tbr);
2313 #endif
2314 break;
2315 #endif
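/* Operand conventions for the FPop blocks below: single-precision
   values live directly in the 32-bit cpu_fpr[] TCG globals, while
   double and quad operands are staged through the DT0/DT1 and QT0/QT1
   helper registers.  Every inexact-capable op brackets its helper with
   exception bookkeeping.  The faddd case, for example, expands to:

       gen_op_load_fpr_DT0(DFPREG(rs1));    // DT0 <- rs1 register pair
       gen_op_load_fpr_DT1(DFPREG(rs2));    // DT1 <- rs2 register pair
       gen_clear_float_exceptions();        // reset accrued status
       gen_helper_faddd();                  // DT0 <- DT0 + DT1
       gen_helper_check_ieee_exceptions();  // trap on unmasked exceptions
       gen_op_store_DT0_fpr(DFPREG(rd));    // write back the pair
*/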
2316 } else if (xop == 0x34) { /* FPU Operations (FPop1) */
2317 if (gen_trap_ifnofpu(dc, cpu_cond))
2318 goto jmp_insn;
2319 gen_op_clear_ieee_excp_and_FTT();
2320 rs1 = GET_FIELD(insn, 13, 17);
2321 rs2 = GET_FIELD(insn, 27, 31);
2322 xop = GET_FIELD(insn, 18, 26);
2323 save_state(dc, cpu_cond);
2324 switch (xop) {
2325 case 0x1: /* fmovs */
2326 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2327 break;
2328 case 0x5: /* fnegs */
2329 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2330 break;
2331 case 0x9: /* fabss */
2332 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2333 break;
2334 case 0x29: /* fsqrts */
2335 CHECK_FPU_FEATURE(dc, FSQRT);
2336 gen_clear_float_exceptions();
2337 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2338 gen_helper_check_ieee_exceptions();
2339 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2340 break;
2341 case 0x2a: /* fsqrtd */
2342 CHECK_FPU_FEATURE(dc, FSQRT);
2343 gen_op_load_fpr_DT1(DFPREG(rs2));
2344 gen_clear_float_exceptions();
2345 gen_helper_fsqrtd();
2346 gen_helper_check_ieee_exceptions();
2347 gen_op_store_DT0_fpr(DFPREG(rd));
2348 break;
2349 case 0x2b: /* fsqrtq */
2350 CHECK_FPU_FEATURE(dc, FLOAT128);
2351 gen_op_load_fpr_QT1(QFPREG(rs2));
2352 gen_clear_float_exceptions();
2353 gen_helper_fsqrtq();
2354 gen_helper_check_ieee_exceptions();
2355 gen_op_store_QT0_fpr(QFPREG(rd));
2356 break;
2357 case 0x41: /* fadds */
2358 gen_clear_float_exceptions();
2359 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2362 break;
2363 case 0x42: /* faddd */
2364 gen_op_load_fpr_DT0(DFPREG(rs1));
2365 gen_op_load_fpr_DT1(DFPREG(rs2));
2366 gen_clear_float_exceptions();
2367 gen_helper_faddd();
2368 gen_helper_check_ieee_exceptions();
2369 gen_op_store_DT0_fpr(DFPREG(rd));
2370 break;
2371 case 0x43: /* faddq */
2372 CHECK_FPU_FEATURE(dc, FLOAT128);
2373 gen_op_load_fpr_QT0(QFPREG(rs1));
2374 gen_op_load_fpr_QT1(QFPREG(rs2));
2375 gen_clear_float_exceptions();
2376 gen_helper_faddq();
2377 gen_helper_check_ieee_exceptions();
2378 gen_op_store_QT0_fpr(QFPREG(rd));
2379 break;
2380 case 0x45: /* fsubs */
2381 gen_clear_float_exceptions();
2382 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2383 gen_helper_check_ieee_exceptions();
2384 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2385 break;
2386 case 0x46: /* fsubd */
2387 gen_op_load_fpr_DT0(DFPREG(rs1));
2388 gen_op_load_fpr_DT1(DFPREG(rs2));
2389 gen_clear_float_exceptions();
2390 gen_helper_fsubd();
2391 gen_helper_check_ieee_exceptions();
2392 gen_op_store_DT0_fpr(DFPREG(rd));
2393 break;
2394 case 0x47: /* fsubq */
2395 CHECK_FPU_FEATURE(dc, FLOAT128);
2396 gen_op_load_fpr_QT0(QFPREG(rs1));
2397 gen_op_load_fpr_QT1(QFPREG(rs2));
2398 gen_clear_float_exceptions();
2399 gen_helper_fsubq();
2400 gen_helper_check_ieee_exceptions();
2401 gen_op_store_QT0_fpr(QFPREG(rd));
2402 break;
2403 case 0x49: /* fmuls */
2404 CHECK_FPU_FEATURE(dc, FMUL);
2405 gen_clear_float_exceptions();
2406 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2407 gen_helper_check_ieee_exceptions();
2408 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2409 break;
2410 case 0x4a: /* fmuld */
2411 CHECK_FPU_FEATURE(dc, FMUL);
2412 gen_op_load_fpr_DT0(DFPREG(rs1));
2413 gen_op_load_fpr_DT1(DFPREG(rs2));
2414 gen_clear_float_exceptions();
2415 gen_helper_fmuld();
2416 gen_helper_check_ieee_exceptions();
2417 gen_op_store_DT0_fpr(DFPREG(rd));
2418 break;
2419 case 0x4b: /* fmulq */
2420 CHECK_FPU_FEATURE(dc, FLOAT128);
2421 CHECK_FPU_FEATURE(dc, FMUL);
2422 gen_op_load_fpr_QT0(QFPREG(rs1));
2423 gen_op_load_fpr_QT1(QFPREG(rs2));
2424 gen_clear_float_exceptions();
2425 gen_helper_fmulq();
2426 gen_helper_check_ieee_exceptions();
2427 gen_op_store_QT0_fpr(QFPREG(rd));
2428 break;
2429 case 0x4d: /* fdivs */
2430 gen_clear_float_exceptions();
2431 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2432 gen_helper_check_ieee_exceptions();
2433 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2434 break;
2435 case 0x4e: /* fdivd */
2436 gen_op_load_fpr_DT0(DFPREG(rs1));
2437 gen_op_load_fpr_DT1(DFPREG(rs2));
2438 gen_clear_float_exceptions();
2439 gen_helper_fdivd();
2440 gen_helper_check_ieee_exceptions();
2441 gen_op_store_DT0_fpr(DFPREG(rd));
2442 break;
2443 case 0x4f: /* fdivq */
2444 CHECK_FPU_FEATURE(dc, FLOAT128);
2445 gen_op_load_fpr_QT0(QFPREG(rs1));
2446 gen_op_load_fpr_QT1(QFPREG(rs2));
2447 gen_clear_float_exceptions();
2448 gen_helper_fdivq();
2449 gen_helper_check_ieee_exceptions();
2450 gen_op_store_QT0_fpr(QFPREG(rd));
2451 break;
2452 case 0x69: /* fsmuld */
2453 CHECK_FPU_FEATURE(dc, FSMULD);
2454 gen_clear_float_exceptions();
2455 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2456 gen_helper_check_ieee_exceptions();
2457 gen_op_store_DT0_fpr(DFPREG(rd));
2458 break;
2459 case 0x6e: /* fdmulq */
2460 CHECK_FPU_FEATURE(dc, FLOAT128);
2461 gen_op_load_fpr_DT0(DFPREG(rs1));
2462 gen_op_load_fpr_DT1(DFPREG(rs2));
2463 gen_clear_float_exceptions();
2464 gen_helper_fdmulq();
2465 gen_helper_check_ieee_exceptions();
2466 gen_op_store_QT0_fpr(QFPREG(rd));
2467 break;
2468 case 0xc4: /* fitos */
2469 gen_clear_float_exceptions();
2470 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2471 gen_helper_check_ieee_exceptions();
2472 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2473 break;
2474 case 0xc6: /* fdtos */
2475 gen_op_load_fpr_DT1(DFPREG(rs2));
2476 gen_clear_float_exceptions();
2477 gen_helper_fdtos(cpu_tmp32);
2478 gen_helper_check_ieee_exceptions();
2479 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2480 break;
2481 case 0xc7: /* fqtos */
2482 CHECK_FPU_FEATURE(dc, FLOAT128);
2483 gen_op_load_fpr_QT1(QFPREG(rs2));
2484 gen_clear_float_exceptions();
2485 gen_helper_fqtos(cpu_tmp32);
2486 gen_helper_check_ieee_exceptions();
2487 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2488 break;
2489 case 0xc8: /* fitod */
2490 gen_helper_fitod(cpu_fpr[rs2]);
2491 gen_op_store_DT0_fpr(DFPREG(rd));
2492 break;
2493 case 0xc9: /* fstod */
2494 gen_helper_fstod(cpu_fpr[rs2]);
2495 gen_op_store_DT0_fpr(DFPREG(rd));
2496 break;
2497 case 0xcb: /* fqtod */
2498 CHECK_FPU_FEATURE(dc, FLOAT128);
2499 gen_op_load_fpr_QT1(QFPREG(rs2));
2500 gen_clear_float_exceptions();
2501 gen_helper_fqtod();
2502 gen_helper_check_ieee_exceptions();
2503 gen_op_store_DT0_fpr(DFPREG(rd));
2504 break;
2505 case 0xcc: /* fitoq */
2506 CHECK_FPU_FEATURE(dc, FLOAT128);
2507 gen_helper_fitoq(cpu_fpr[rs2]);
2508 gen_op_store_QT0_fpr(QFPREG(rd));
2509 break;
2510 case 0xcd: /* fstoq */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_helper_fstoq(cpu_fpr[rs2]);
2513 gen_op_store_QT0_fpr(QFPREG(rd));
2514 break;
2515 case 0xce: /* fdtoq */
2516 CHECK_FPU_FEATURE(dc, FLOAT128);
2517 gen_op_load_fpr_DT1(DFPREG(rs2));
2518 gen_helper_fdtoq();
2519 gen_op_store_QT0_fpr(QFPREG(rd));
2520 break;
2521 case 0xd1: /* fstoi */
2522 gen_clear_float_exceptions();
2523 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2524 gen_helper_check_ieee_exceptions();
2525 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2526 break;
2527 case 0xd2: /* fdtoi */
2528 gen_op_load_fpr_DT1(DFPREG(rs2));
2529 gen_clear_float_exceptions();
2530 gen_helper_fdtoi(cpu_tmp32);
2531 gen_helper_check_ieee_exceptions();
2532 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2533 break;
2534 case 0xd3: /* fqtoi */
2535 CHECK_FPU_FEATURE(dc, FLOAT128);
2536 gen_op_load_fpr_QT1(QFPREG(rs2));
2537 gen_clear_float_exceptions();
2538 gen_helper_fqtoi(cpu_tmp32);
2539 gen_helper_check_ieee_exceptions();
2540 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2541 break;
2542 #ifdef TARGET_SPARC64
2543 case 0x2: /* V9 fmovd */
2544 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2545 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2546 cpu_fpr[DFPREG(rs2) + 1]);
2547 break;
2548 case 0x3: /* V9 fmovq */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2551 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2552 cpu_fpr[QFPREG(rs2) + 1]);
2553 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2554 cpu_fpr[QFPREG(rs2) + 2]);
2555 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2556 cpu_fpr[QFPREG(rs2) + 3]);
2557 break;
2558 case 0x6: /* V9 fnegd */
2559 gen_op_load_fpr_DT1(DFPREG(rs2));
2560 gen_helper_fnegd();
2561 gen_op_store_DT0_fpr(DFPREG(rd));
2562 break;
2563 case 0x7: /* V9 fnegq */
2564 CHECK_FPU_FEATURE(dc, FLOAT128);
2565 gen_op_load_fpr_QT1(QFPREG(rs2));
2566 gen_helper_fnegq();
2567 gen_op_store_QT0_fpr(QFPREG(rd));
2568 break;
2569 case 0xa: /* V9 fabsd */
2570 gen_op_load_fpr_DT1(DFPREG(rs2));
2571 gen_helper_fabsd();
2572 gen_op_store_DT0_fpr(DFPREG(rd));
2573 break;
2574 case 0xb: /* V9 fabsq */
2575 CHECK_FPU_FEATURE(dc, FLOAT128);
2576 gen_op_load_fpr_QT1(QFPREG(rs2));
2577 gen_helper_fabsq();
2578 gen_op_store_QT0_fpr(QFPREG(rd));
2579 break;
2580 case 0x81: /* V9 fstox */
2581 gen_clear_float_exceptions();
2582 gen_helper_fstox(cpu_fpr[rs2]);
2583 gen_helper_check_ieee_exceptions();
2584 gen_op_store_DT0_fpr(DFPREG(rd));
2585 break;
2586 case 0x82: /* V9 fdtox */
2587 gen_op_load_fpr_DT1(DFPREG(rs2));
2588 gen_clear_float_exceptions();
2589 gen_helper_fdtox();
2590 gen_helper_check_ieee_exceptions();
2591 gen_op_store_DT0_fpr(DFPREG(rd));
2592 break;
2593 case 0x83: /* V9 fqtox */
2594 CHECK_FPU_FEATURE(dc, FLOAT128);
2595 gen_op_load_fpr_QT1(QFPREG(rs2));
2596 gen_clear_float_exceptions();
2597 gen_helper_fqtox();
2598 gen_helper_check_ieee_exceptions();
2599 gen_op_store_DT0_fpr(DFPREG(rd));
2600 break;
2601 case 0x84: /* V9 fxtos */
2602 gen_op_load_fpr_DT1(DFPREG(rs2));
2603 gen_clear_float_exceptions();
2604 gen_helper_fxtos(cpu_tmp32);
2605 gen_helper_check_ieee_exceptions();
2606 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2607 break;
2608 case 0x88: /* V9 fxtod */
2609 gen_op_load_fpr_DT1(DFPREG(rs2));
2610 gen_clear_float_exceptions();
2611 gen_helper_fxtod();
2612 gen_helper_check_ieee_exceptions();
2613 gen_op_store_DT0_fpr(DFPREG(rd));
2614 break;
2615 case 0x8c: /* V9 fxtoq */
2616 CHECK_FPU_FEATURE(dc, FLOAT128);
2617 gen_op_load_fpr_DT1(DFPREG(rs2));
2618 gen_clear_float_exceptions();
2619 gen_helper_fxtoq();
2620 gen_helper_check_ieee_exceptions();
2621 gen_op_store_QT0_fpr(QFPREG(rd));
2622 break;
2623 #endif
2624 default:
2625 goto illegal_insn;
2626 }
2627 } else if (xop == 0x35) { /* FPU Operations (FPop2) */
2628 #ifdef TARGET_SPARC64
2629 int cond;
2630 #endif
2631 if (gen_trap_ifnofpu(dc, cpu_cond))
2632 goto jmp_insn;
2633 gen_op_clear_ieee_excp_and_FTT();
2634 rs1 = GET_FIELD(insn, 13, 17);
2635 rs2 = GET_FIELD(insn, 27, 31);
2636 xop = GET_FIELD(insn, 18, 26);
2637 save_state(dc, cpu_cond);
2638 #ifdef TARGET_SPARC64
2639 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2640 int l1;
2642 l1 = gen_new_label();
2643 cond = GET_FIELD_SP(insn, 14, 17);
2644 cpu_src1 = get_src1(insn, cpu_src1);
2645 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2646 0, l1);
2647 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2648 gen_set_label(l1);
2649 break;
2650 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2651 int l1;
2653 l1 = gen_new_label();
2654 cond = GET_FIELD_SP(insn, 14, 17);
2655 cpu_src1 = get_src1(insn, cpu_src1);
2656 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2657 0, l1);
2658 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2659 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2660 gen_set_label(l1);
2661 break;
2662 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2663 int l1;
2665 CHECK_FPU_FEATURE(dc, FLOAT128);
2666 l1 = gen_new_label();
2667 cond = GET_FIELD_SP(insn, 14, 17);
2668 cpu_src1 = get_src1(insn, cpu_src1);
2669 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2670 0, l1);
2671 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2672 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2673 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2674 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2675 gen_set_label(l1);
2676 break;
2677 }
2678 #endif
2679 switch (xop) {
2680 #ifdef TARGET_SPARC64
2681 #define FMOVSCC(fcc) \
2682 { \
2683 TCGv r_cond; \
2684 int l1; \
2686 l1 = gen_new_label(); \
2687 r_cond = tcg_temp_new(); \
2688 cond = GET_FIELD_SP(insn, 14, 17); \
2689 gen_fcond(r_cond, fcc, cond); \
2690 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2691 0, l1); \
2692 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2693 gen_set_label(l1); \
2694 tcg_temp_free(r_cond); \
2695 }
2696 #define FMOVDCC(fcc) \
2697 { \
2698 TCGv r_cond; \
2699 int l1; \
2701 l1 = gen_new_label(); \
2702 r_cond = tcg_temp_new(); \
2703 cond = GET_FIELD_SP(insn, 14, 17); \
2704 gen_fcond(r_cond, fcc, cond); \
2705 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2706 0, l1); \
2707 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2708 cpu_fpr[DFPREG(rs2)]); \
2709 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2710 cpu_fpr[DFPREG(rs2) + 1]); \
2711 gen_set_label(l1); \
2712 tcg_temp_free(r_cond); \
2713 }
2714 #define FMOVQCC(fcc) \
2715 { \
2716 TCGv r_cond; \
2717 int l1; \
2719 l1 = gen_new_label(); \
2720 r_cond = tcg_temp_new(); \
2721 cond = GET_FIELD_SP(insn, 14, 17); \
2722 gen_fcond(r_cond, fcc, cond); \
2723 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2724 0, l1); \
2725 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2726 cpu_fpr[QFPREG(rs2)]); \
2727 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2728 cpu_fpr[QFPREG(rs2) + 1]); \
2729 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2730 cpu_fpr[QFPREG(rs2) + 2]); \
2731 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2732 cpu_fpr[QFPREG(rs2) + 3]); \
2733 gen_set_label(l1); \
2734 tcg_temp_free(r_cond); \
2735 }
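/* The FMOV*CC(fcc) macros implement fmovcc on %fccN as a branch over a
   register copy: gen_fcond() reduces the selected floating-point
   condition to a zero/non-zero value in a temporary, and the move of
   the single/double/quad register set is skipped when it is zero. */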
2736 case 0x001: /* V9 fmovscc %fcc0 */
2737 FMOVSCC(0);
2738 break;
2739 case 0x002: /* V9 fmovdcc %fcc0 */
2740 FMOVDCC(0);
2741 break;
2742 case 0x003: /* V9 fmovqcc %fcc0 */
2743 CHECK_FPU_FEATURE(dc, FLOAT128);
2744 FMOVQCC(0);
2745 break;
2746 case 0x041: /* V9 fmovscc %fcc1 */
2747 FMOVSCC(1);
2748 break;
2749 case 0x042: /* V9 fmovdcc %fcc1 */
2750 FMOVDCC(1);
2751 break;
2752 case 0x043: /* V9 fmovqcc %fcc1 */
2753 CHECK_FPU_FEATURE(dc, FLOAT128);
2754 FMOVQCC(1);
2755 break;
2756 case 0x081: /* V9 fmovscc %fcc2 */
2757 FMOVSCC(2);
2758 break;
2759 case 0x082: /* V9 fmovdcc %fcc2 */
2760 FMOVDCC(2);
2761 break;
2762 case 0x083: /* V9 fmovqcc %fcc2 */
2763 CHECK_FPU_FEATURE(dc, FLOAT128);
2764 FMOVQCC(2);
2765 break;
2766 case 0x0c1: /* V9 fmovscc %fcc3 */
2767 FMOVSCC(3);
2768 break;
2769 case 0x0c2: /* V9 fmovdcc %fcc3 */
2770 FMOVDCC(3);
2771 break;
2772 case 0x0c3: /* V9 fmovqcc %fcc3 */
2773 CHECK_FPU_FEATURE(dc, FLOAT128);
2774 FMOVQCC(3);
2775 break;
2776 #undef FMOVSCC
2777 #undef FMOVDCC
2778 #undef FMOVQCC
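/* Same macros, redefined for the integer condition codes: gen_cond()
   evaluates the condition against %icc (argument 0) or %xcc
   (argument 1) instead of a floating-point %fcc. */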
2779 #define FMOVSCC(icc) \
2780 { \
2781 TCGv r_cond; \
2782 int l1; \
2784 l1 = gen_new_label(); \
2785 r_cond = tcg_temp_new(); \
2786 cond = GET_FIELD_SP(insn, 14, 17); \
2787 gen_cond(r_cond, icc, cond, dc); \
2788 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2789 0, l1); \
2790 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2791 gen_set_label(l1); \
2792 tcg_temp_free(r_cond); \
2793 }
2794 #define FMOVDCC(icc) \
2795 { \
2796 TCGv r_cond; \
2797 int l1; \
2799 l1 = gen_new_label(); \
2800 r_cond = tcg_temp_new(); \
2801 cond = GET_FIELD_SP(insn, 14, 17); \
2802 gen_cond(r_cond, icc, cond, dc); \
2803 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2804 0, l1); \
2805 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2806 cpu_fpr[DFPREG(rs2)]); \
2807 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2808 cpu_fpr[DFPREG(rs2) + 1]); \
2809 gen_set_label(l1); \
2810 tcg_temp_free(r_cond); \
2811 }
2812 #define FMOVQCC(icc) \
2813 { \
2814 TCGv r_cond; \
2815 int l1; \
2817 l1 = gen_new_label(); \
2818 r_cond = tcg_temp_new(); \
2819 cond = GET_FIELD_SP(insn, 14, 17); \
2820 gen_cond(r_cond, icc, cond, dc); \
2821 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2822 0, l1); \
2823 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2824 cpu_fpr[QFPREG(rs2)]); \
2825 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2826 cpu_fpr[QFPREG(rs2) + 1]); \
2827 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2828 cpu_fpr[QFPREG(rs2) + 2]); \
2829 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2830 cpu_fpr[QFPREG(rs2) + 3]); \
2831 gen_set_label(l1); \
2832 tcg_temp_free(r_cond); \
2833 }
2835 case 0x101: /* V9 fmovscc %icc */
2836 FMOVSCC(0);
2837 break;
2838 case 0x102: /* V9 fmovdcc %icc */
2839 FMOVDCC(0);
break;
2840 case 0x103: /* V9 fmovqcc %icc */
2841 CHECK_FPU_FEATURE(dc, FLOAT128);
2842 FMOVQCC(0);
2843 break;
2844 case 0x181: /* V9 fmovscc %xcc */
2845 FMOVSCC(1);
2846 break;
2847 case 0x182: /* V9 fmovdcc %xcc */
2848 FMOVDCC(1);
2849 break;
2850 case 0x183: /* V9 fmovqcc %xcc */
2851 CHECK_FPU_FEATURE(dc, FLOAT128);
2852 FMOVQCC(1);
2853 break;
2854 #undef FMOVSCC
2855 #undef FMOVDCC
2856 #undef FMOVQCC
2857 #endif
2858 case 0x51: /* fcmps, V9 %fcc */
2859 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2860 break;
2861 case 0x52: /* fcmpd, V9 %fcc */
2862 gen_op_load_fpr_DT0(DFPREG(rs1));
2863 gen_op_load_fpr_DT1(DFPREG(rs2));
2864 gen_op_fcmpd(rd & 3);
2865 break;
2866 case 0x53: /* fcmpq, V9 %fcc */
2867 CHECK_FPU_FEATURE(dc, FLOAT128);
2868 gen_op_load_fpr_QT0(QFPREG(rs1));
2869 gen_op_load_fpr_QT1(QFPREG(rs2));
2870 gen_op_fcmpq(rd & 3);
2871 break;
2872 case 0x55: /* fcmpes, V9 %fcc */
2873 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2874 break;
2875 case 0x56: /* fcmped, V9 %fcc */
2876 gen_op_load_fpr_DT0(DFPREG(rs1));
2877 gen_op_load_fpr_DT1(DFPREG(rs2));
2878 gen_op_fcmped(rd & 3);
2879 break;
2880 case 0x57: /* fcmpeq, V9 %fcc */
2881 CHECK_FPU_FEATURE(dc, FLOAT128);
2882 gen_op_load_fpr_QT0(QFPREG(rs1));
2883 gen_op_load_fpr_QT1(QFPREG(rs2));
2884 gen_op_fcmpeq(rd & 3);
2885 break;
2886 default:
2887 goto illegal_insn;
2888 }
2889 } else if (xop == 0x2) {
2890 // clr/mov shortcut
2892 rs1 = GET_FIELD(insn, 13, 17);
2893 if (rs1 == 0) {
2894 // or %g0, x, y -> mov T0, x; mov y, T0
2895 if (IS_IMM) { /* immediate */
2896 TCGv r_const;
2898 simm = GET_FIELDs(insn, 19, 31);
2899 r_const = tcg_const_tl(simm);
2900 gen_movl_TN_reg(rd, r_const);
2901 tcg_temp_free(r_const);
2902 } else { /* register */
2903 rs2 = GET_FIELD(insn, 27, 31);
2904 gen_movl_reg_TN(rs2, cpu_dst);
2905 gen_movl_TN_reg(rd, cpu_dst);
2906 }
2907 } else {
2908 cpu_src1 = get_src1(insn, cpu_src1);
2909 if (IS_IMM) { /* immediate */
2910 simm = GET_FIELDs(insn, 19, 31);
2911 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2912 gen_movl_TN_reg(rd, cpu_dst);
2913 } else { /* register */
2914 // or x, %g0, y -> mov T1, x; mov y, T1
2915 rs2 = GET_FIELD(insn, 27, 31);
2916 if (rs2 != 0) {
2917 gen_movl_reg_TN(rs2, cpu_src2);
2918 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2919 gen_movl_TN_reg(rd, cpu_dst);
2920 } else
2921 gen_movl_TN_reg(rd, cpu_src1);
2922 }
2923 }
2924 #ifdef TARGET_SPARC64
2925 } else if (xop == 0x25) { /* sll, V9 sllx */
2926 cpu_src1 = get_src1(insn, cpu_src1);
2927 if (IS_IMM) { /* immediate */
2928 simm = GET_FIELDs(insn, 20, 31);
2929 if (insn & (1 << 12)) {
2930 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2931 } else {
2932 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2933 }
2934 } else { /* register */
2935 rs2 = GET_FIELD(insn, 27, 31);
2936 gen_movl_reg_TN(rs2, cpu_src2);
2937 if (insn & (1 << 12)) {
2938 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2939 } else {
2940 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2941 }
2942 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2943 }
2944 gen_movl_TN_reg(rd, cpu_dst);
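/* Bit 12 of the instruction selects the 64-bit V9 shift forms:
   sllx/srlx/srax take a 6-bit count (& 0x3f), while the 32-bit
   sll/srl/sra forms mask the count to 5 bits and operate on the low
   32 bits of the source (zero-extended for srl, sign-extended for
   sra). */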
2945 } else if (xop == 0x26) { /* srl, V9 srlx */
2946 cpu_src1 = get_src1(insn, cpu_src1);
2947 if (IS_IMM) { /* immediate */
2948 simm = GET_FIELDs(insn, 20, 31);
2949 if (insn & (1 << 12)) {
2950 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2951 } else {
2952 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2953 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2954 }
2955 } else { /* register */
2956 rs2 = GET_FIELD(insn, 27, 31);
2957 gen_movl_reg_TN(rs2, cpu_src2);
2958 if (insn & (1 << 12)) {
2959 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2960 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2961 } else {
2962 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2963 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2964 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2965 }
2966 }
2967 gen_movl_TN_reg(rd, cpu_dst);
2968 } else if (xop == 0x27) { /* sra, V9 srax */
2969 cpu_src1 = get_src1(insn, cpu_src1);
2970 if (IS_IMM) { /* immediate */
2971 simm = GET_FIELDs(insn, 20, 31);
2972 if (insn & (1 << 12)) {
2973 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2974 } else {
2975 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2976 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2977 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2978 }
2979 } else { /* register */
2980 rs2 = GET_FIELD(insn, 27, 31);
2981 gen_movl_reg_TN(rs2, cpu_src2);
2982 if (insn & (1 << 12)) {
2983 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2984 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2985 } else {
2986 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2987 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2988 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2989 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2990 }
2991 }
2992 gen_movl_TN_reg(rd, cpu_dst);
2993 #endif
2994 } else if (xop < 0x36) {
2995 if (xop < 0x20) {
2996 cpu_src1 = get_src1(insn, cpu_src1);
2997 cpu_src2 = get_src2(insn, cpu_src2);
2998 switch (xop & ~0x10) {
2999 case 0x0: /* add */
3000 if (IS_IMM) {
3001 simm = GET_FIELDs(insn, 19, 31);
3002 if (xop & 0x10) {
3003 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3004 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3005 dc->cc_op = CC_OP_ADD;
3006 } else {
3007 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3008 }
3009 } else {
3010 if (xop & 0x10) {
3011 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3012 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3013 dc->cc_op = CC_OP_ADD;
3014 } else {
3015 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3016 }
3017 }
3018 break;
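/* Condition codes are tracked lazily: the cc-setting variants (xop
   bit 4) leave their result in cpu_cc_dst (and, for add/sub, their
   operands in cpu_cc_src/cpu_cc_src2), then merely record the pending
   operation kind in cpu_cc_op.  The real PSR/CCR bits are computed on
   demand by gen_helper_compute_psr(). */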
3019 case 0x1: /* and */
3020 if (IS_IMM) {
3021 simm = GET_FIELDs(insn, 19, 31);
3022 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3023 } else {
3024 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3025 }
3026 if (xop & 0x10) {
3027 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3028 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3029 dc->cc_op = CC_OP_LOGIC;
3030 }
3031 break;
3032 case 0x2: /* or */
3033 if (IS_IMM) {
3034 simm = GET_FIELDs(insn, 19, 31);
3035 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3036 } else {
3037 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3038 }
3039 if (xop & 0x10) {
3040 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3041 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3042 dc->cc_op = CC_OP_LOGIC;
3043 }
3044 break;
3045 case 0x3: /* xor */
3046 if (IS_IMM) {
3047 simm = GET_FIELDs(insn, 19, 31);
3048 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3049 } else {
3050 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3051 }
3052 if (xop & 0x10) {
3053 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3054 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3055 dc->cc_op = CC_OP_LOGIC;
3056 }
3057 break;
3058 case 0x4: /* sub */
3059 if (IS_IMM) {
3060 simm = GET_FIELDs(insn, 19, 31);
3061 if (xop & 0x10) {
3062 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3063 } else {
3064 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3065 }
3066 } else {
3067 if (xop & 0x10) {
3068 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3069 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3070 dc->cc_op = CC_OP_SUB;
3071 } else {
3072 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3073 }
3074 }
3075 break;
3076 case 0x5: /* andn */
3077 if (IS_IMM) {
3078 simm = GET_FIELDs(insn, 19, 31);
3079 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3080 } else {
3081 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3082 }
3083 if (xop & 0x10) {
3084 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3085 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3086 dc->cc_op = CC_OP_LOGIC;
3087 }
3088 break;
3089 case 0x6: /* orn */
3090 if (IS_IMM) {
3091 simm = GET_FIELDs(insn, 19, 31);
3092 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3093 } else {
3094 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3095 }
3096 if (xop & 0x10) {
3097 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3098 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3099 dc->cc_op = CC_OP_LOGIC;
3100 }
3101 break;
3102 case 0x7: /* xorn */
3103 if (IS_IMM) {
3104 simm = GET_FIELDs(insn, 19, 31);
3105 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3106 } else {
3107 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3108 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3109 }
3110 if (xop & 0x10) {
3111 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3112 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3113 dc->cc_op = CC_OP_LOGIC;
3114 }
3115 break;
3116 case 0x8: /* addx, V9 addc */
3117 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3118 (xop & 0x10));
3119 break;
3120 #ifdef TARGET_SPARC64
3121 case 0x9: /* V9 mulx */
3122 if (IS_IMM) {
3123 simm = GET_FIELDs(insn, 19, 31);
3124 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3125 } else {
3126 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3127 }
3128 break;
3129 #endif
3130 case 0xa: /* umul */
3131 CHECK_IU_FEATURE(dc, MUL);
3132 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3133 if (xop & 0x10) {
3134 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3135 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3136 dc->cc_op = CC_OP_LOGIC;
3137 }
3138 break;
3139 case 0xb: /* smul */
3140 CHECK_IU_FEATURE(dc, MUL);
3141 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3142 if (xop & 0x10) {
3143 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3144 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3145 dc->cc_op = CC_OP_LOGIC;
3146 }
3147 break;
3148 case 0xc: /* subx, V9 subc */
3149 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3150 (xop & 0x10));
3151 break;
3152 #ifdef TARGET_SPARC64
3153 case 0xd: /* V9 udivx */
3154 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3155 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3156 gen_trap_ifdivzero_tl(cpu_cc_src2);
3157 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3158 break;
3159 #endif
3160 case 0xe: /* udiv */
3161 CHECK_IU_FEATURE(dc, DIV);
3162 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3163 if (xop & 0x10) {
3164 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3165 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3166 dc->cc_op = CC_OP_DIV;
3167 }
3168 break;
3169 case 0xf: /* sdiv */
3170 CHECK_IU_FEATURE(dc, DIV);
3171 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3172 if (xop & 0x10) {
3173 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3174 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3175 dc->cc_op = CC_OP_DIV;
3176 }
3177 break;
3178 default:
3179 goto illegal_insn;
3180 }
3181 gen_movl_TN_reg(rd, cpu_dst);
3182 } else {
3183 cpu_src1 = get_src1(insn, cpu_src1);
3184 cpu_src2 = get_src2(insn, cpu_src2);
3185 switch (xop) {
3186 case 0x20: /* taddcc */
3187 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3188 gen_movl_TN_reg(rd, cpu_dst);
3189 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3190 dc->cc_op = CC_OP_TADD;
3191 break;
3192 case 0x21: /* tsubcc */
3193 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3194 gen_movl_TN_reg(rd, cpu_dst);
3195 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3196 dc->cc_op = CC_OP_TSUB;
3197 break;
3198 case 0x22: /* taddcctv */
3199 save_state(dc, cpu_cond);
3200 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3201 gen_movl_TN_reg(rd, cpu_dst);
3202 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3203 dc->cc_op = CC_OP_TADDTV;
3204 break;
3205 case 0x23: /* tsubcctv */
3206 save_state(dc, cpu_cond);
3207 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3208 gen_movl_TN_reg(rd, cpu_dst);
3209 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3210 dc->cc_op = CC_OP_TSUBTV;
3211 break;
3212 case 0x24: /* mulscc */
3213 gen_helper_compute_psr();
3214 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3215 gen_movl_TN_reg(rd, cpu_dst);
3216 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3217 dc->cc_op = CC_OP_ADD;
3218 break;
3219 #ifndef TARGET_SPARC64
3220 case 0x25: /* sll */
3221 if (IS_IMM) { /* immediate */
3222 simm = GET_FIELDs(insn, 20, 31);
3223 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3224 } else { /* register */
3225 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3226 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3227 }
3228 gen_movl_TN_reg(rd, cpu_dst);
3229 break;
3230 case 0x26: /* srl */
3231 if (IS_IMM) { /* immediate */
3232 simm = GET_FIELDs(insn, 20, 31);
3233 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3234 } else { /* register */
3235 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3236 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3237 }
3238 gen_movl_TN_reg(rd, cpu_dst);
3239 break;
3240 case 0x27: /* sra */
3241 if (IS_IMM) { /* immediate */
3242 simm = GET_FIELDs(insn, 20, 31);
3243 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3244 } else { /* register */
3245 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3246 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3247 }
3248 gen_movl_TN_reg(rd, cpu_dst);
3249 break;
3250 #endif
3251 case 0x30:
3252 {
3253 switch(rd) {
3254 case 0: /* wry */
3255 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3256 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3257 break;
3258 #ifndef TARGET_SPARC64
3259 case 0x01 ... 0x0f: /* undefined in the
3260 SPARCv8 manual, nop
3261 on the microSPARC
3262 II */
3263 case 0x10 ... 0x1f: /* implementation-dependent
3264 in the SPARCv8
3265 manual, nop on the
3266 microSPARC II */
3267 break;
3268 #else
3269 case 0x2: /* V9 wrccr */
3270 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3271 gen_helper_wrccr(cpu_dst);
3272 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3273 dc->cc_op = CC_OP_FLAGS;
3274 break;
3275 case 0x3: /* V9 wrasi */
3276 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3277 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3278 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3279 break;
3280 case 0x6: /* V9 wrfprs */
3281 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3282 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3283 save_state(dc, cpu_cond);
3284 gen_op_next_insn();
3285 tcg_gen_exit_tb(0);
3286 dc->is_br = 1;
3287 break;
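/* %fprs controls FPU enable state that was baked into this
   translation (see fpu_enabled/gen_trap_ifnofpu), so after wrfprs the
   TB is terminated and execution resumes at the next instruction with
   a fresh translation. */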
3288 case 0xf: /* V9 sir, nop if user */
3289 #if !defined(CONFIG_USER_ONLY)
3290 if (supervisor(dc)) {
3291 ; // XXX
3292 }
3293 #endif
3294 break;
3295 case 0x13: /* Graphics Status */
3296 if (gen_trap_ifnofpu(dc, cpu_cond))
3297 goto jmp_insn;
3298 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3299 break;
3300 case 0x14: /* Softint set */
3301 if (!supervisor(dc))
3302 goto illegal_insn;
3303 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3304 gen_helper_set_softint(cpu_tmp64);
3305 break;
3306 case 0x15: /* Softint clear */
3307 if (!supervisor(dc))
3308 goto illegal_insn;
3309 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3310 gen_helper_clear_softint(cpu_tmp64);
3311 break;
3312 case 0x16: /* Softint write */
3313 if (!supervisor(dc))
3314 goto illegal_insn;
3315 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3316 gen_helper_write_softint(cpu_tmp64);
3317 break;
3318 case 0x17: /* Tick compare */
3319 #if !defined(CONFIG_USER_ONLY)
3320 if (!supervisor(dc))
3321 goto illegal_insn;
3322 #endif
3323 {
3324 TCGv_ptr r_tickptr;
3326 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3327 cpu_src2);
3328 r_tickptr = tcg_temp_new_ptr();
3329 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3330 offsetof(CPUState, tick));
3331 gen_helper_tick_set_limit(r_tickptr,
3332 cpu_tick_cmpr);
3333 tcg_temp_free_ptr(r_tickptr);
3334 }
3335 break;
3336 case 0x18: /* System tick */
3337 #if !defined(CONFIG_USER_ONLY)
3338 if (!supervisor(dc))
3339 goto illegal_insn;
3340 #endif
3341 {
3342 TCGv_ptr r_tickptr;
3344 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3345 cpu_src2);
3346 r_tickptr = tcg_temp_new_ptr();
3347 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3348 offsetof(CPUState, stick));
3349 gen_helper_tick_set_count(r_tickptr,
3350 cpu_dst);
3351 tcg_temp_free_ptr(r_tickptr);
3352 }
3353 break;
3354 case 0x19: /* System tick compare */
3355 #if !defined(CONFIG_USER_ONLY)
3356 if (!supervisor(dc))
3357 goto illegal_insn;
3358 #endif
3359 {
3360 TCGv_ptr r_tickptr;
3362 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3363 cpu_src2);
3364 r_tickptr = tcg_temp_new_ptr();
3365 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3366 offsetof(CPUState, stick));
3367 gen_helper_tick_set_limit(r_tickptr,
3368 cpu_stick_cmpr);
3369 tcg_temp_free_ptr(r_tickptr);
3370 }
3371 break;
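/* The three timer writes above mirror the read side: compute the new
   value with the wr-style xor, fetch the timer pointer from the CPU
   state, and let gen_helper_tick_set_limit()/gen_helper_tick_set_count()
   reprogram the host-driven timer. */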
3373 case 0x10: /* Performance Control */
3374 case 0x11: /* Performance Instrumentation
3375 Counter */
3376 case 0x12: /* Dispatch Control */
3377 #endif
3378 default:
3379 goto illegal_insn;
3380 }
3381 }
3382 break;
3383 #if !defined(CONFIG_USER_ONLY)
3384 case 0x31: /* wrpsr, V9 saved, restored */
3385 {
3386 if (!supervisor(dc))
3387 goto priv_insn;
3388 #ifdef TARGET_SPARC64
3389 switch (rd) {
3390 case 0:
3391 gen_helper_saved();
3392 break;
3393 case 1:
3394 gen_helper_restored();
3395 break;
3396 case 2: /* UA2005 allclean */
3397 case 3: /* UA2005 otherw */
3398 case 4: /* UA2005 normalw */
3399 case 5: /* UA2005 invalw */
3400 // XXX
3401 default:
3402 goto illegal_insn;
3403 }
3404 #else
3405 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3406 gen_helper_wrpsr(cpu_dst);
3407 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3408 dc->cc_op = CC_OP_FLAGS;
3409 save_state(dc, cpu_cond);
3410 gen_op_next_insn();
3411 tcg_gen_exit_tb(0);
3412 dc->is_br = 1;
3413 #endif
3414 }
3415 break;
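/* wrpsr can change CWP and other PSR control bits that translation
   depends on, so as with wrfprs the generated code ends the TB right
   after the helper runs. */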
3416 case 0x32: /* wrwim, V9 wrpr */
3417 {
3418 if (!supervisor(dc))
3419 goto priv_insn;
3420 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3421 #ifdef TARGET_SPARC64
3422 switch (rd) {
3423 case 0: // tpc
3424 {
3425 TCGv_ptr r_tsptr;
3427 r_tsptr = tcg_temp_new_ptr();
3428 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3429 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3430 offsetof(trap_state, tpc));
3431 tcg_temp_free_ptr(r_tsptr);
3432 }
3433 break;
3434 case 1: // tnpc
3435 {
3436 TCGv_ptr r_tsptr;
3438 r_tsptr = tcg_temp_new_ptr();
3439 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3440 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3441 offsetof(trap_state, tnpc));
3442 tcg_temp_free_ptr(r_tsptr);
3443 }
3444 break;
3445 case 2: // tstate
3446 {
3447 TCGv_ptr r_tsptr;
3449 r_tsptr = tcg_temp_new_ptr();
3450 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3451 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3452 offsetof(trap_state,
3453 tstate));
3454 tcg_temp_free_ptr(r_tsptr);
3455 }
3456 break;
3457 case 3: // tt
3458 {
3459 TCGv_ptr r_tsptr;
3461 r_tsptr = tcg_temp_new_ptr();
3462 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3463 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3464 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3465 offsetof(trap_state, tt));
3466 tcg_temp_free_ptr(r_tsptr);
3467 }
3468 break;
3469 case 4: // tick
3470 {
3471 TCGv_ptr r_tickptr;
3473 r_tickptr = tcg_temp_new_ptr();
3474 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3475 offsetof(CPUState, tick));
3476 gen_helper_tick_set_count(r_tickptr,
3477 cpu_tmp0);
3478 tcg_temp_free_ptr(r_tickptr);
3479 }
3480 break;
3481 case 5: // tba
3482 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3483 break;
3484 case 6: // pstate
3485 save_state(dc, cpu_cond);
3486 gen_helper_wrpstate(cpu_tmp0);
3487 dc->npc = DYNAMIC_PC;
3488 break;
3489 case 7: // tl
3490 save_state(dc, cpu_cond);
3491 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3492 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3493 offsetof(CPUSPARCState, tl));
3494 dc->npc = DYNAMIC_PC;
3495 break;
3496 case 8: // pil
3497 gen_helper_wrpil(cpu_tmp0);
3498 break;
3499 case 9: // cwp
3500 gen_helper_wrcwp(cpu_tmp0);
3501 break;
3502 case 10: // cansave
3503 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3504 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3505 offsetof(CPUSPARCState,
3506 cansave));
3507 break;
3508 case 11: // canrestore
3509 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3510 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3511 offsetof(CPUSPARCState,
3512 canrestore));
3513 break;
3514 case 12: // cleanwin
3515 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3516 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3517 offsetof(CPUSPARCState,
3518 cleanwin));
3519 break;
3520 case 13: // otherwin
3521 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3522 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3523 offsetof(CPUSPARCState,
3524 otherwin));
3525 break;
3526 case 14: // wstate
3527 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3528 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3529 offsetof(CPUSPARCState,
3530 wstate));
3531 break;
3532 case 16: // UA2005 gl
3533 CHECK_IU_FEATURE(dc, GL);
3534 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3535 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3536 offsetof(CPUSPARCState, gl));
3537 break;
3538 case 26: // UA2005 strand status
3539 CHECK_IU_FEATURE(dc, HYPV);
3540 if (!hypervisor(dc))
3541 goto priv_insn;
3542 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3543 break;
3544 default:
3545 goto illegal_insn;
3546 }
3547 #else
3548 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3549 if (dc->def->nwindows != 32)
3550 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3551 (1 << dc->def->nwindows) - 1);
3552 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3553 #endif
3554 }
3555 break;
3556 case 0x33: /* wrtbr, UA2005 wrhpr */
3557 {
3558 #ifndef TARGET_SPARC64
3559 if (!supervisor(dc))
3560 goto priv_insn;
3561 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3562 #else
3563 CHECK_IU_FEATURE(dc, HYPV);
3564 if (!hypervisor(dc))
3565 goto priv_insn;
3566 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3567 switch (rd) {
3568 case 0: // hpstate
3569 // XXX gen_op_wrhpstate();
3570 save_state(dc, cpu_cond);
3571 gen_op_next_insn();
3572 tcg_gen_exit_tb(0);
3573 dc->is_br = 1;
3574 break;
3575 case 1: // htstate
3576 // XXX gen_op_wrhtstate();
3577 break;
3578 case 3: // hintp
3579 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3580 break;
3581 case 5: // htba
3582 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3583 break;
3584 case 31: // hstick_cmpr
3585 {
3586 TCGv_ptr r_tickptr;
3588 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3589 r_tickptr = tcg_temp_new_ptr();
3590 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3591 offsetof(CPUState, hstick));
3592 gen_helper_tick_set_limit(r_tickptr,
3593 cpu_hstick_cmpr);
3594 tcg_temp_free_ptr(r_tickptr);
3595 }
3596 break;
3597 case 6: // hver readonly
3598 default:
3599 goto illegal_insn;
3600 }
3601 #endif
3602 }
3603 break;
3604 #endif
3605 #ifdef TARGET_SPARC64
3606 case 0x2c: /* V9 movcc */
3607 {
3608 int cc = GET_FIELD_SP(insn, 11, 12);
3609 int cond = GET_FIELD_SP(insn, 14, 17);
3610 TCGv r_cond;
3611 int l1;
3613 r_cond = tcg_temp_new();
3614 if (insn & (1 << 18)) {
3615 if (cc == 0)
3616 gen_cond(r_cond, 0, cond, dc);
3617 else if (cc == 2)
3618 gen_cond(r_cond, 1, cond, dc);
3619 else
3620 goto illegal_insn;
3621 } else {
3622 gen_fcond(r_cond, cc, cond);
3623 }
3625 l1 = gen_new_label();
3627 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3628 if (IS_IMM) { /* immediate */
3629 TCGv r_const;
3631 simm = GET_FIELD_SPs(insn, 0, 10);
3632 r_const = tcg_const_tl(simm);
3633 gen_movl_TN_reg(rd, r_const);
3634 tcg_temp_free(r_const);
3635 } else {
3636 rs2 = GET_FIELD_SP(insn, 0, 4);
3637 gen_movl_reg_TN(rs2, cpu_tmp0);
3638 gen_movl_TN_reg(rd, cpu_tmp0);
3639 }
3640 gen_set_label(l1);
3641 tcg_temp_free(r_cond);
3642 break;
3643 }
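/* Like the FMOV*CC macros, movcc lowers the conditional move to a
   branch over a register write.  Roughly:

       if (cond == 0) goto l1;
       rd = (IS_IMM ? simm11 : rs2);
   l1:  ...
*/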
3644 case 0x2d: /* V9 sdivx */
3645 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3646 gen_movl_TN_reg(rd, cpu_dst);
3647 break;
3648 case 0x2e: /* V9 popc */
3649 {
3650 cpu_src2 = get_src2(insn, cpu_src2);
3651 gen_helper_popc(cpu_dst, cpu_src2);
3652 gen_movl_TN_reg(rd, cpu_dst);
3653 }
break;
3654 case 0x2f: /* V9 movr */
3655 {
3656 int cond = GET_FIELD_SP(insn, 10, 12);
3657 int l1;
3659 cpu_src1 = get_src1(insn, cpu_src1);
3661 l1 = gen_new_label();
3663 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3664 cpu_src1, 0, l1);
3665 if (IS_IMM) { /* immediate */
3666 TCGv r_const;
3668 simm = GET_FIELD_SPs(insn, 0, 9);
3669 r_const = tcg_const_tl(simm);
3670 gen_movl_TN_reg(rd, r_const);
3671 tcg_temp_free(r_const);
3672 } else {
3673 rs2 = GET_FIELD_SP(insn, 0, 4);
3674 gen_movl_reg_TN(rs2, cpu_tmp0);
3675 gen_movl_TN_reg(rd, cpu_tmp0);
3676 }
3677 gen_set_label(l1);
3678 break;
3679 }
3680 #endif
3681 default:
3682 goto illegal_insn;
3683 }
3685 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3686 #ifdef TARGET_SPARC64
3687 int opf = GET_FIELD_SP(insn, 5, 13);
3688 rs1 = GET_FIELD(insn, 13, 17);
3689 rs2 = GET_FIELD(insn, 27, 31);
3690 if (gen_trap_ifnofpu(dc, cpu_cond))
3691 goto jmp_insn;
3693 switch (opf) {
3694 case 0x000: /* VIS I edge8cc */
3695 case 0x001: /* VIS II edge8n */
3696 case 0x002: /* VIS I edge8lcc */
3697 case 0x003: /* VIS II edge8ln */
3698 case 0x004: /* VIS I edge16cc */
3699 case 0x005: /* VIS II edge16n */
3700 case 0x006: /* VIS I edge16lcc */
3701 case 0x007: /* VIS II edge16ln */
3702 case 0x008: /* VIS I edge32cc */
3703 case 0x009: /* VIS II edge32n */
3704 case 0x00a: /* VIS I edge32lcc */
3705 case 0x00b: /* VIS II edge32ln */
3706 // XXX
3707 goto illegal_insn;
3708 case 0x010: /* VIS I array8 */
3709 CHECK_FPU_FEATURE(dc, VIS1);
3710 cpu_src1 = get_src1(insn, cpu_src1);
3711 gen_movl_reg_TN(rs2, cpu_src2);
3712 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3713 gen_movl_TN_reg(rd, cpu_dst);
3714 break;
3715 case 0x012: /* VIS I array16 */
3716 CHECK_FPU_FEATURE(dc, VIS1);
3717 cpu_src1 = get_src1(insn, cpu_src1);
3718 gen_movl_reg_TN(rs2, cpu_src2);
3719 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3720 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3721 gen_movl_TN_reg(rd, cpu_dst);
3722 break;
3723 case 0x014: /* VIS I array32 */
3724 CHECK_FPU_FEATURE(dc, VIS1);
3725 cpu_src1 = get_src1(insn, cpu_src1);
3726 gen_movl_reg_TN(rs2, cpu_src2);
3727 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3728 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3729 gen_movl_TN_reg(rd, cpu_dst);
3730 break;
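/* array16 and array32 have no helpers of their own: they reuse
   gen_helper_array8() and scale the resulting address by the element
   size with a left shift of 1 or 2. */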
3731 case 0x018: /* VIS I alignaddr */
3732 CHECK_FPU_FEATURE(dc, VIS1);
3733 cpu_src1 = get_src1(insn, cpu_src1);
3734 gen_movl_reg_TN(rs2, cpu_src2);
3735 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3736 gen_movl_TN_reg(rd, cpu_dst);
3737 break;
3738 case 0x019: /* VIS II bmask */
3739 case 0x01a: /* VIS I alignaddrl */
3740 // XXX
3741 goto illegal_insn;
3742 case 0x020: /* VIS I fcmple16 */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 gen_helper_fcmple16();
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x022: /* VIS I fcmpne16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_op_load_fpr_DT0(DFPREG(rs1));
3752 gen_op_load_fpr_DT1(DFPREG(rs2));
3753 gen_helper_fcmpne16();
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x024: /* VIS I fcmple32 */
3757 CHECK_FPU_FEATURE(dc, VIS1);
3758 gen_op_load_fpr_DT0(DFPREG(rs1));
3759 gen_op_load_fpr_DT1(DFPREG(rs2));
3760 gen_helper_fcmple32();
3761 gen_op_store_DT0_fpr(DFPREG(rd));
3762 break;
3763 case 0x026: /* VIS I fcmpne32 */
3764 CHECK_FPU_FEATURE(dc, VIS1);
3765 gen_op_load_fpr_DT0(DFPREG(rs1));
3766 gen_op_load_fpr_DT1(DFPREG(rs2));
3767 gen_helper_fcmpne32();
3768 gen_op_store_DT0_fpr(DFPREG(rd));
3769 break;
3770 case 0x028: /* VIS I fcmpgt16 */
3771 CHECK_FPU_FEATURE(dc, VIS1);
3772 gen_op_load_fpr_DT0(DFPREG(rs1));
3773 gen_op_load_fpr_DT1(DFPREG(rs2));
3774 gen_helper_fcmpgt16();
3775 gen_op_store_DT0_fpr(DFPREG(rd));
3776 break;
3777 case 0x02a: /* VIS I fcmpeq16 */
3778 CHECK_FPU_FEATURE(dc, VIS1);
3779 gen_op_load_fpr_DT0(DFPREG(rs1));
3780 gen_op_load_fpr_DT1(DFPREG(rs2));
3781 gen_helper_fcmpeq16();
3782 gen_op_store_DT0_fpr(DFPREG(rd));
3783 break;
3784 case 0x02c: /* VIS I fcmpgt32 */
3785 CHECK_FPU_FEATURE(dc, VIS1);
3786 gen_op_load_fpr_DT0(DFPREG(rs1));
3787 gen_op_load_fpr_DT1(DFPREG(rs2));
3788 gen_helper_fcmpgt32();
3789 gen_op_store_DT0_fpr(DFPREG(rd));
3790 break;
3791 case 0x02e: /* VIS I fcmpeq32 */
3792 CHECK_FPU_FEATURE(dc, VIS1);
3793 gen_op_load_fpr_DT0(DFPREG(rs1));
3794 gen_op_load_fpr_DT1(DFPREG(rs2));
3795 gen_helper_fcmpeq32();
3796 gen_op_store_DT0_fpr(DFPREG(rd));
3797 break;
3798 case 0x031: /* VIS I fmul8x16 */
3799 CHECK_FPU_FEATURE(dc, VIS1);
3800 gen_op_load_fpr_DT0(DFPREG(rs1));
3801 gen_op_load_fpr_DT1(DFPREG(rs2));
3802 gen_helper_fmul8x16();
3803 gen_op_store_DT0_fpr(DFPREG(rd));
3804 break;
3805 case 0x033: /* VIS I fmul8x16au */
3806 CHECK_FPU_FEATURE(dc, VIS1);
3807 gen_op_load_fpr_DT0(DFPREG(rs1));
3808 gen_op_load_fpr_DT1(DFPREG(rs2));
3809 gen_helper_fmul8x16au();
3810 gen_op_store_DT0_fpr(DFPREG(rd));
3811 break;
3812 case 0x035: /* VIS I fmul8x16al */
3813 CHECK_FPU_FEATURE(dc, VIS1);
3814 gen_op_load_fpr_DT0(DFPREG(rs1));
3815 gen_op_load_fpr_DT1(DFPREG(rs2));
3816 gen_helper_fmul8x16al();
3817 gen_op_store_DT0_fpr(DFPREG(rd));
3818 break;
3819 case 0x036: /* VIS I fmul8sux16 */
3820 CHECK_FPU_FEATURE(dc, VIS1);
3821 gen_op_load_fpr_DT0(DFPREG(rs1));
3822 gen_op_load_fpr_DT1(DFPREG(rs2));
3823 gen_helper_fmul8sux16();
3824 gen_op_store_DT0_fpr(DFPREG(rd));
3825 break;
3826 case 0x037: /* VIS I fmul8ulx16 */
3827 CHECK_FPU_FEATURE(dc, VIS1);
3828 gen_op_load_fpr_DT0(DFPREG(rs1));
3829 gen_op_load_fpr_DT1(DFPREG(rs2));
3830 gen_helper_fmul8ulx16();
3831 gen_op_store_DT0_fpr(DFPREG(rd));
3832 break;
3833 case 0x038: /* VIS I fmuld8sux16 */
3834 CHECK_FPU_FEATURE(dc, VIS1);
3835 gen_op_load_fpr_DT0(DFPREG(rs1));
3836 gen_op_load_fpr_DT1(DFPREG(rs2));
3837 gen_helper_fmuld8sux16();
3838 gen_op_store_DT0_fpr(DFPREG(rd));
3839 break;
3840 case 0x039: /* VIS I fmuld8ulx16 */
3841 CHECK_FPU_FEATURE(dc, VIS1);
3842 gen_op_load_fpr_DT0(DFPREG(rs1));
3843 gen_op_load_fpr_DT1(DFPREG(rs2));
3844 gen_helper_fmuld8ulx16();
3845 gen_op_store_DT0_fpr(DFPREG(rd));
3846 break;
3847 case 0x03a: /* VIS I fpack32 */
3848 case 0x03b: /* VIS I fpack16 */
3849 case 0x03d: /* VIS I fpackfix */
3850 case 0x03e: /* VIS I pdist */
3851 // XXX
3852 goto illegal_insn;
3853 case 0x048: /* VIS I faligndata */
3854 CHECK_FPU_FEATURE(dc, VIS1);
3855 gen_op_load_fpr_DT0(DFPREG(rs1));
3856 gen_op_load_fpr_DT1(DFPREG(rs2));
3857 gen_helper_faligndata();
3858 gen_op_store_DT0_fpr(DFPREG(rd));
3859 break;
3860 case 0x04b: /* VIS I fpmerge */
3861 CHECK_FPU_FEATURE(dc, VIS1);
3862 gen_op_load_fpr_DT0(DFPREG(rs1));
3863 gen_op_load_fpr_DT1(DFPREG(rs2));
3864 gen_helper_fpmerge();
3865 gen_op_store_DT0_fpr(DFPREG(rd));
3866 break;
3867 case 0x04c: /* VIS II bshuffle */
3868 // XXX
3869 goto illegal_insn;
3870 case 0x04d: /* VIS I fexpand */
3871 CHECK_FPU_FEATURE(dc, VIS1);
3872 gen_op_load_fpr_DT0(DFPREG(rs1));
3873 gen_op_load_fpr_DT1(DFPREG(rs2));
3874 gen_helper_fexpand();
3875 gen_op_store_DT0_fpr(DFPREG(rd));
3876 break;
3877 case 0x050: /* VIS I fpadd16 */
3878 CHECK_FPU_FEATURE(dc, VIS1);
3879 gen_op_load_fpr_DT0(DFPREG(rs1));
3880 gen_op_load_fpr_DT1(DFPREG(rs2));
3881 gen_helper_fpadd16();
3882 gen_op_store_DT0_fpr(DFPREG(rd));
3883 break;
3884 case 0x051: /* VIS I fpadd16s */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 gen_helper_fpadd16s(cpu_fpr[rd],
3887 cpu_fpr[rs1], cpu_fpr[rs2]);
3888 break;
3889 case 0x052: /* VIS I fpadd32 */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 gen_op_load_fpr_DT0(DFPREG(rs1));
3892 gen_op_load_fpr_DT1(DFPREG(rs2));
3893 gen_helper_fpadd32();
3894 gen_op_store_DT0_fpr(DFPREG(rd));
3895 break;
3896 case 0x053: /* VIS I fpadd32s */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_helper_fpadd32s(cpu_fpr[rd],
3899 cpu_fpr[rs1], cpu_fpr[rs2]);
3900 break;
3901 case 0x054: /* VIS I fpsub16 */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 gen_op_load_fpr_DT0(DFPREG(rs1));
3904 gen_op_load_fpr_DT1(DFPREG(rs2));
3905 gen_helper_fpsub16();
3906 gen_op_store_DT0_fpr(DFPREG(rd));
3907 break;
3908 case 0x055: /* VIS I fpsub16s */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 gen_helper_fpsub16s(cpu_fpr[rd],
3911 cpu_fpr[rs1], cpu_fpr[rs2]);
3912 break;
3913 case 0x056: /* VIS I fpsub32 */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 gen_op_load_fpr_DT0(DFPREG(rs1));
3916 gen_op_load_fpr_DT1(DFPREG(rs2));
3917 gen_helper_fpsub32();
3918 gen_op_store_DT0_fpr(DFPREG(rd));
3919 break;
3920 case 0x057: /* VIS I fpsub32s */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 gen_helper_fpsub32s(cpu_fpr[rd],
3923 cpu_fpr[rs1], cpu_fpr[rs2]);
3924 break;
3925 case 0x060: /* VIS I fzero */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3928 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3929 break;
3930 case 0x061: /* VIS I fzeros */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3933 break;
3934 case 0x062: /* VIS I fnor */
3935 CHECK_FPU_FEATURE(dc, VIS1);
3936 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3937 cpu_fpr[DFPREG(rs2)]);
3938 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
3939 cpu_fpr[DFPREG(rs2) + 1]);
3940 break;
3941 case 0x063: /* VIS I fnors */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3944 break;
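/* In this VIS logical group the "s" suffix denotes the
   single-precision form acting on one 32-bit register; the unsuffixed
   form applies the same operation to both halves of the even/odd pair
   selected by DFPREG(). */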
3945 case 0x064: /* VIS I fandnot2 */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3948 cpu_fpr[DFPREG(rs2)]);
3949 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3950 cpu_fpr[DFPREG(rs1) + 1],
3951 cpu_fpr[DFPREG(rs2) + 1]);
3952 break;
3953 case 0x065: /* VIS I fandnot2s */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3956 break;
3957 case 0x066: /* VIS I fnot2 */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3960 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3961 cpu_fpr[DFPREG(rs2) + 1]);
3962 break;
3963 case 0x067: /* VIS I fnot2s */
3964 CHECK_FPU_FEATURE(dc, VIS1);
3965 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3966 break;
3967 case 0x068: /* VIS I fandnot1 */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3970 cpu_fpr[DFPREG(rs1)]);
3971 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3972 cpu_fpr[DFPREG(rs2) + 1],
3973 cpu_fpr[DFPREG(rs1) + 1]);
3974 break;
3975 case 0x069: /* VIS I fandnot1s */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3978 break;
3979 case 0x06a: /* VIS I fnot1 */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3982 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3983 cpu_fpr[DFPREG(rs1) + 1]);
3984 break;
3985 case 0x06b: /* VIS I fnot1s */
3986 CHECK_FPU_FEATURE(dc, VIS1);
3987 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3988 break;
3989 case 0x06c: /* VIS I fxor */
3990 CHECK_FPU_FEATURE(dc, VIS1);
3991 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3992 cpu_fpr[DFPREG(rs2)]);
3993 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3994 cpu_fpr[DFPREG(rs1) + 1],
3995 cpu_fpr[DFPREG(rs2) + 1]);
3996 break;
3997 case 0x06d: /* VIS I fxors */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4000 break;
4001 case 0x06e: /* VIS I fnand */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4004 cpu_fpr[DFPREG(rs2)]);
4005 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4006 cpu_fpr[DFPREG(rs2) + 1]);
4007 break;
4008 case 0x06f: /* VIS I fnands */
4009 CHECK_FPU_FEATURE(dc, VIS1);
4010 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4011 break;
4012 case 0x070: /* VIS I fand */
4013 CHECK_FPU_FEATURE(dc, VIS1);
4014 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4015 cpu_fpr[DFPREG(rs2)]);
4016 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4017 cpu_fpr[DFPREG(rs1) + 1],
4018 cpu_fpr[DFPREG(rs2) + 1]);
4019 break;
4020 case 0x071: /* VIS I fands */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4023 break;
4024 case 0x072: /* VIS I fxnor */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4027 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4028 cpu_fpr[DFPREG(rs1)]);
4029 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4030 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4031 cpu_fpr[DFPREG(rs1) + 1]);
4032 break;
4033 case 0x073: /* VIS I fxnors */
4034 CHECK_FPU_FEATURE(dc, VIS1);
4035 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4036 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4037 break;
4038 case 0x074: /* VIS I fsrc1 */
4039 CHECK_FPU_FEATURE(dc, VIS1);
4040 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4041 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4042 cpu_fpr[DFPREG(rs1) + 1]);
4043 break;
4044 case 0x075: /* VIS I fsrc1s */
4045 CHECK_FPU_FEATURE(dc, VIS1);
4046 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4047 break;
4048 case 0x076: /* VIS I fornot2 */
4049 CHECK_FPU_FEATURE(dc, VIS1);
4050 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4051 cpu_fpr[DFPREG(rs2)]);
4052 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4053 cpu_fpr[DFPREG(rs1) + 1],
4054 cpu_fpr[DFPREG(rs2) + 1]);
4055 break;
4056 case 0x077: /* VIS I fornot2s */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4059 break;
4060 case 0x078: /* VIS I fsrc2 */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 gen_op_load_fpr_DT0(DFPREG(rs2));
4063 gen_op_store_DT0_fpr(DFPREG(rd));
4064 break;
4065 case 0x079: /* VIS I fsrc2s */
4066 CHECK_FPU_FEATURE(dc, VIS1);
4067 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4068 break;
4069 case 0x07a: /* VIS I fornot1 */
4070 CHECK_FPU_FEATURE(dc, VIS1);
4071 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4072 cpu_fpr[DFPREG(rs1)]);
4073 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4074 cpu_fpr[DFPREG(rs2) + 1],
4075 cpu_fpr[DFPREG(rs1) + 1]);
4076 break;
4077 case 0x07b: /* VIS I fornot1s */
4078 CHECK_FPU_FEATURE(dc, VIS1);
4079 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4080 break;
4081 case 0x07c: /* VIS I for */
4082 CHECK_FPU_FEATURE(dc, VIS1);
4083 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4084 cpu_fpr[DFPREG(rs2)]);
4085 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4086 cpu_fpr[DFPREG(rs1) + 1],
4087 cpu_fpr[DFPREG(rs2) + 1]);
4088 break;
4089 case 0x07d: /* VIS I fors */
4090 CHECK_FPU_FEATURE(dc, VIS1);
4091 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4092 break;
4093 case 0x07e: /* VIS I fone */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4096 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4097 break;
4098 case 0x07f: /* VIS I fones */
4099 CHECK_FPU_FEATURE(dc, VIS1);
4100 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4101 break;
4102 case 0x080: /* VIS I shutdown */
4103 case 0x081: /* VIS II siam */
4104 // XXX
4105 goto illegal_insn;
4106 default:
4107 goto illegal_insn;
4108 }
4109 #else
4110 goto ncp_insn;
4111 #endif
4112 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4113 #ifdef TARGET_SPARC64
4114 goto illegal_insn;
4115 #else
4116 goto ncp_insn;
4117 #endif
4118 #ifdef TARGET_SPARC64
4119 } else if (xop == 0x39) { /* V9 return */
4120 TCGv_i32 r_const;
4122 save_state(dc, cpu_cond);
4123 cpu_src1 = get_src1(insn, cpu_src1);
4124 if (IS_IMM) { /* immediate */
4125 simm = GET_FIELDs(insn, 19, 31);
4126 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4127 } else { /* register */
4128 rs2 = GET_FIELD(insn, 27, 31);
4129 if (rs2) {
4130 gen_movl_reg_TN(rs2, cpu_src2);
4131 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4132 } else
4133 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4134 }
4135 gen_helper_restore();
4136 gen_mov_pc_npc(dc, cpu_cond);
4137 r_const = tcg_const_i32(3);
4138 gen_helper_check_align(cpu_dst, r_const);
4139 tcg_temp_free_i32(r_const);
4140 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4141 dc->npc = DYNAMIC_PC;
4142 goto jmp_insn;
4143 #endif
4144 } else {
4145 cpu_src1 = get_src1(insn, cpu_src1);
4146 if (IS_IMM) { /* immediate */
4147 simm = GET_FIELDs(insn, 19, 31);
4148 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4149 } else { /* register */
4150 rs2 = GET_FIELD(insn, 27, 31);
4151 if (rs2) {
4152 gen_movl_reg_TN(rs2, cpu_src2);
4153 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4154 } else
4155 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4156 }
4157 switch (xop) {
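/* jmpl: rd receives the address of the jmpl instruction itself,
   then control transfers to the computed target; with rd = %o7
   this is a call, with rd = %g0 a plain indirect jump. */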
4158 case 0x38: /* jmpl */
4159 {
4160 TCGv r_pc;
4161 TCGv_i32 r_const;
4163 r_pc = tcg_const_tl(dc->pc);
4164 gen_movl_TN_reg(rd, r_pc);
4165 tcg_temp_free(r_pc);
4166 gen_mov_pc_npc(dc, cpu_cond);
4167 r_const = tcg_const_i32(3);
4168 gen_helper_check_align(cpu_dst, r_const);
4169 tcg_temp_free_i32(r_const);
4170 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4171 dc->npc = DYNAMIC_PC;
4172 }
4173 goto jmp_insn;
4174 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4175 case 0x39: /* rett, V9 return */
4176 {
4177 TCGv_i32 r_const;
4179 if (!supervisor(dc))
4180 goto priv_insn;
4181 gen_mov_pc_npc(dc, cpu_cond);
4182 r_const = tcg_const_i32(3);
4183 gen_helper_check_align(cpu_dst, r_const);
4184 tcg_temp_free_i32(r_const);
4185 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4186 dc->npc = DYNAMIC_PC;
4187 gen_helper_rett();
4188 }
4189 goto jmp_insn;
4190 #endif
4191 case 0x3b: /* flush */
4192 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4193 goto unimp_flush;
4194 gen_helper_flush(cpu_dst);
4195 break;
4196 case 0x3c: /* save */
4197 save_state(dc, cpu_cond);
4198 gen_helper_save();
4199 gen_movl_TN_reg(rd, cpu_dst);
4200 break;
4201 case 0x3d: /* restore */
4202 save_state(dc, cpu_cond);
4203 gen_helper_restore();
4204 gen_movl_TN_reg(rd, cpu_dst);
4205 break;
4206 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4207 case 0x3e: /* V9 done/retry */
4208 {
4209 switch (rd) {
4210 case 0:
4211 if (!supervisor(dc))
4212 goto priv_insn;
4213 dc->npc = DYNAMIC_PC;
4214 dc->pc = DYNAMIC_PC;
4215 gen_helper_done();
4216 goto jmp_insn;
4217 case 1:
4218 if (!supervisor(dc))
4219 goto priv_insn;
4220 dc->npc = DYNAMIC_PC;
4221 dc->pc = DYNAMIC_PC;
4222 gen_helper_retry();
4223 goto jmp_insn;
4224 default:
4225 goto illegal_insn;
4226 }
4227 }
4228 break;
4229 #endif
4230 default:
4231 goto illegal_insn;
4232 }
4233 }
4234 break;
4235 }
4236 break;
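/* op == 3: loads and stores.  xop (instruction bits 24:19) selects
   the access; the effective address is rs1 + simm13 in the
   immediate form, rs1 + rs2 otherwise. */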
4237 case 3: /* load/store instructions */
4238 {
4239 unsigned int xop = GET_FIELD(insn, 7, 12);
4241 /* flush pending conditional evaluations before exposing
4242 cpu state */
4243 if (dc->cc_op != CC_OP_FLAGS) {
4244 dc->cc_op = CC_OP_FLAGS;
4245 gen_helper_compute_psr();
4246 }
4247 cpu_src1 = get_src1(insn, cpu_src1);
4248 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4249 rs2 = GET_FIELD(insn, 27, 31);
4250 gen_movl_reg_TN(rs2, cpu_src2);
4251 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4252 } else if (IS_IMM) { /* immediate */
4253 simm = GET_FIELDs(insn, 19, 31);
4254 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4255 } else { /* register */
4256 rs2 = GET_FIELD(insn, 27, 31);
4257 if (rs2 != 0) {
4258 gen_movl_reg_TN(rs2, cpu_src2);
4259 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4260 } else
4261 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4262 }
4263 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4264 (xop > 0x17 && xop <= 0x1d ) ||
4265 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4266 switch (xop) {
4267 case 0x0: /* ld, V9 lduw, load unsigned word */
4268 gen_address_mask(dc, cpu_addr);
4269 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4270 break;
4271 case 0x1: /* ldub, load unsigned byte */
4272 gen_address_mask(dc, cpu_addr);
4273 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4274 break;
4275 case 0x2: /* lduh, load unsigned halfword */
4276 gen_address_mask(dc, cpu_addr);
4277 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4278 break;
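/* ldd: 64-bit load into an even/odd register pair.  The even
   register rd receives the most significant word and rd + 1 the
   least significant one, so an odd rd is an illegal instruction. */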
4279 case 0x3: /* ldd, load double word */
4280 if (rd & 1)
4281 goto illegal_insn;
4282 else {
4283 TCGv_i32 r_const;
4285 save_state(dc, cpu_cond);
4286 r_const = tcg_const_i32(7);
4287 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4288 tcg_temp_free_i32(r_const);
4289 gen_address_mask(dc, cpu_addr);
4290 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4291 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4292 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4293 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4294 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4295 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4296 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4297 }
4298 break;
4299 case 0x9: /* ldsb, load signed byte */
4300 gen_address_mask(dc, cpu_addr);
4301 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4302 break;
4303 case 0xa: /* ldsh, load signed halfword */
4304 gen_address_mask(dc, cpu_addr);
4305 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4306 break;
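/* ldstub: load an unsigned byte and store all ones back to the
   same location -- the classic SPARC spinlock primitive.  The
   separate load and store below are not atomic with respect to
   other CPUs, hence the XXX. */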
4307 case 0xd: /* ldstub -- XXX: should be atomic */
4308 {
4309 TCGv r_const;
4311 gen_address_mask(dc, cpu_addr);
4312 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx); /* ldstub zero-extends the loaded byte */
4313 r_const = tcg_const_tl(0xff);
4314 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4315 tcg_temp_free(r_const);
4316 }
4317 break;
4318 case 0x0f: /* swap, swap register with memory. Should
4319 also be atomic */
4320 CHECK_IU_FEATURE(dc, SWAP);
4321 gen_movl_reg_TN(rd, cpu_val);
4322 gen_address_mask(dc, cpu_addr);
4323 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4324 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4325 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4326 break;
4327 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
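/* Alternate-space loads.  On V8 (pre-SPARC64) these exist only in
   the register-offset form and are privileged, hence the IS_IMM and
   supervisor checks; on V9 the ASI comes from the instruction field
   or the %asi register. */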
4328 case 0x10: /* lda, V9 lduwa, load word alternate */
4329 #ifndef TARGET_SPARC64
4330 if (IS_IMM)
4331 goto illegal_insn;
4332 if (!supervisor(dc))
4333 goto priv_insn;
4334 #endif
4335 save_state(dc, cpu_cond);
4336 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4337 break;
4338 case 0x11: /* lduba, load unsigned byte alternate */
4339 #ifndef TARGET_SPARC64
4340 if (IS_IMM)
4341 goto illegal_insn;
4342 if (!supervisor(dc))
4343 goto priv_insn;
4344 #endif
4345 save_state(dc, cpu_cond);
4346 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4347 break;
4348 case 0x12: /* lduha, load unsigned halfword alternate */
4349 #ifndef TARGET_SPARC64
4350 if (IS_IMM)
4351 goto illegal_insn;
4352 if (!supervisor(dc))
4353 goto priv_insn;
4354 #endif
4355 save_state(dc, cpu_cond);
4356 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4357 break;
4358 case 0x13: /* ldda, load double word alternate */
4359 #ifndef TARGET_SPARC64
4360 if (IS_IMM)
4361 goto illegal_insn;
4362 if (!supervisor(dc))
4363 goto priv_insn;
4364 #endif
4365 if (rd & 1)
4366 goto illegal_insn;
4367 save_state(dc, cpu_cond);
4368 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4369 goto skip_move;
4370 case 0x19: /* ldsba, load signed byte alternate */
4371 #ifndef TARGET_SPARC64
4372 if (IS_IMM)
4373 goto illegal_insn;
4374 if (!supervisor(dc))
4375 goto priv_insn;
4376 #endif
4377 save_state(dc, cpu_cond);
4378 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4379 break;
4380 case 0x1a: /* ldsha, load signed halfword alternate */
4381 #ifndef TARGET_SPARC64
4382 if (IS_IMM)
4383 goto illegal_insn;
4384 if (!supervisor(dc))
4385 goto priv_insn;
4386 #endif
4387 save_state(dc, cpu_cond);
4388 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4389 break;
4390 case 0x1d: /* ldstuba -- XXX: should be atomic */
4391 #ifndef TARGET_SPARC64
4392 if (IS_IMM)
4393 goto illegal_insn;
4394 if (!supervisor(dc))
4395 goto priv_insn;
4396 #endif
4397 save_state(dc, cpu_cond);
4398 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4399 break;
4400 case 0x1f: /* swapa, swap reg with alt. memory. Should
4401 also be atomic */
4402 CHECK_IU_FEATURE(dc, SWAP);
4403 #ifndef TARGET_SPARC64
4404 if (IS_IMM)
4405 goto illegal_insn;
4406 if (!supervisor(dc))
4407 goto priv_insn;
4408 #endif
4409 save_state(dc, cpu_cond);
4410 gen_movl_reg_TN(rd, cpu_val);
4411 gen_swap_asi(cpu_val, cpu_addr, insn);
4412 break;
4414 #ifndef TARGET_SPARC64
4415 case 0x30: /* ldc */
4416 case 0x31: /* ldcsr */
4417 case 0x33: /* lddc */
4418 goto ncp_insn;
4419 #endif
4420 #endif
4421 #ifdef TARGET_SPARC64
4422 case 0x08: /* V9 ldsw */
4423 gen_address_mask(dc, cpu_addr);
4424 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4425 break;
4426 case 0x0b: /* V9 ldx */
4427 gen_address_mask(dc, cpu_addr);
4428 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4429 break;
4430 case 0x18: /* V9 ldswa */
4431 save_state(dc, cpu_cond);
4432 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4433 break;
4434 case 0x1b: /* V9 ldxa */
4435 save_state(dc, cpu_cond);
4436 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4437 break;
4438 case 0x2d: /* V9 prefetch, no effect */
4439 goto skip_move;
4440 case 0x30: /* V9 ldfa */
4441 save_state(dc, cpu_cond);
4442 gen_ldf_asi(cpu_addr, insn, 4, rd);
4443 goto skip_move;
4444 case 0x33: /* V9 lddfa */
4445 save_state(dc, cpu_cond);
4446 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4447 goto skip_move;
4448 case 0x3d: /* V9 prefetcha, no effect */
4449 goto skip_move;
4450 case 0x32: /* V9 ldqfa */
4451 CHECK_FPU_FEATURE(dc, FLOAT128);
4452 save_state(dc, cpu_cond);
4453 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4454 goto skip_move;
4455 #endif
4456 default:
4457 goto illegal_insn;
4458 }
4459 gen_movl_TN_reg(rd, cpu_val);
4460 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4461 skip_move: ;
4462 #endif
4463 } else if (xop >= 0x20 && xop < 0x24) {
4464 if (gen_trap_ifnofpu(dc, cpu_cond))
4465 goto jmp_insn;
4466 save_state(dc, cpu_cond);
4467 switch (xop) {
4468 case 0x20: /* ldf, load fpreg */
4469 gen_address_mask(dc, cpu_addr);
4470 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4471 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4472 break;
4473 case 0x21: /* ldfsr, V9 ldxfsr */
4474 #ifdef TARGET_SPARC64
4475 gen_address_mask(dc, cpu_addr);
4476 if (rd == 1) {
4477 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4478 gen_helper_ldxfsr(cpu_tmp64);
4479 } else
4480 #else
4482 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4483 gen_helper_ldfsr(cpu_tmp32);
4485 #endif
4486 break;
4487 case 0x22: /* ldqf, load quad fpreg */
4488 {
4489 TCGv_i32 r_const;
4491 CHECK_FPU_FEATURE(dc, FLOAT128);
4492 r_const = tcg_const_i32(dc->mem_idx);
4493 gen_helper_ldqf(cpu_addr, r_const);
4494 tcg_temp_free_i32(r_const);
4495 gen_op_store_QT0_fpr(QFPREG(rd));
4496 }
4497 break;
4498 case 0x23: /* lddf, load double fpreg */
4499 {
4500 TCGv_i32 r_const;
4502 r_const = tcg_const_i32(dc->mem_idx);
4503 gen_helper_lddf(cpu_addr, r_const);
4504 tcg_temp_free_i32(r_const);
4505 gen_op_store_DT0_fpr(DFPREG(rd));
4506 }
4507 break;
4508 default:
4509 goto illegal_insn;
4510 }
4511 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4512 xop == 0xe || xop == 0x1e) {
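/* Integer stores: cpu_val is loaded from rd up front; std below
   stores an even/odd register pair as a single 64-bit access. */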
4513 gen_movl_reg_TN(rd, cpu_val);
4514 switch (xop) {
4515 case 0x4: /* st, store word */
4516 gen_address_mask(dc, cpu_addr);
4517 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4518 break;
4519 case 0x5: /* stb, store byte */
4520 gen_address_mask(dc, cpu_addr);
4521 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4522 break;
4523 case 0x6: /* sth, store halfword */
4524 gen_address_mask(dc, cpu_addr);
4525 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4526 break;
4527 case 0x7: /* std, store double word */
4528 if (rd & 1)
4529 goto illegal_insn;
4530 else {
4531 TCGv_i32 r_const;
4533 save_state(dc, cpu_cond);
4534 gen_address_mask(dc, cpu_addr);
4535 r_const = tcg_const_i32(7);
4536 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4537 tcg_temp_free_i32(r_const);
4538 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4539 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4540 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4541 }
4542 break;
4543 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4544 case 0x14: /* sta, V9 stwa, store word alternate */
4545 #ifndef TARGET_SPARC64
4546 if (IS_IMM)
4547 goto illegal_insn;
4548 if (!supervisor(dc))
4549 goto priv_insn;
4550 #endif
4551 save_state(dc, cpu_cond);
4552 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4553 dc->npc = DYNAMIC_PC;
4554 break;
4555 case 0x15: /* stba, store byte alternate */
4556 #ifndef TARGET_SPARC64
4557 if (IS_IMM)
4558 goto illegal_insn;
4559 if (!supervisor(dc))
4560 goto priv_insn;
4561 #endif
4562 save_state(dc, cpu_cond);
4563 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4564 dc->npc = DYNAMIC_PC;
4565 break;
4566 case 0x16: /* stha, store halfword alternate */
4567 #ifndef TARGET_SPARC64
4568 if (IS_IMM)
4569 goto illegal_insn;
4570 if (!supervisor(dc))
4571 goto priv_insn;
4572 #endif
4573 save_state(dc, cpu_cond);
4574 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4575 dc->npc = DYNAMIC_PC;
4576 break;
4577 case 0x17: /* stda, store double word alternate */
4578 #ifndef TARGET_SPARC64
4579 if (IS_IMM)
4580 goto illegal_insn;
4581 if (!supervisor(dc))
4582 goto priv_insn;
4583 #endif
4584 if (rd & 1)
4585 goto illegal_insn;
4586 else {
4587 save_state(dc, cpu_cond);
4588 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4589 }
4590 break;
4591 #endif
4592 #ifdef TARGET_SPARC64
4593 case 0x0e: /* V9 stx */
4594 gen_address_mask(dc, cpu_addr);
4595 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4596 break;
4597 case 0x1e: /* V9 stxa */
4598 save_state(dc, cpu_cond);
4599 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4600 dc->npc = DYNAMIC_PC;
4601 break;
4602 #endif
4603 default:
4604 goto illegal_insn;
4605 }
4606 } else if (xop > 0x23 && xop < 0x28) {
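/* Floating-point stores; gen_trap_ifnofpu bails out first when the
   FPU is disabled. */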
4607 if (gen_trap_ifnofpu(dc, cpu_cond))
4608 goto jmp_insn;
4609 save_state(dc, cpu_cond);
4610 switch (xop) {
4611 case 0x24: /* stf, store fpreg */
4612 gen_address_mask(dc, cpu_addr);
4613 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4614 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4615 break;
4616 case 0x25: /* stfsr, V9 stxfsr */
4617 #ifdef TARGET_SPARC64
4618 gen_address_mask(dc, cpu_addr);
4619 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4620 if (rd == 1)
4621 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4622 else
4623 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4624 #else
4625 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4626 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4627 #endif
4628 break;
4629 case 0x26:
4630 #ifdef TARGET_SPARC64
4631 /* V9 stqf, store quad fpreg */
4632 {
4633 TCGv_i32 r_const;
4635 CHECK_FPU_FEATURE(dc, FLOAT128);
4636 gen_op_load_fpr_QT0(QFPREG(rd));
4637 r_const = tcg_const_i32(dc->mem_idx);
4638 gen_helper_stqf(cpu_addr, r_const);
4639 tcg_temp_free_i32(r_const);
4640 }
4641 break;
4642 #else /* !TARGET_SPARC64 */
4643 /* stdfq, store floating point queue */
4644 #if defined(CONFIG_USER_ONLY)
4645 goto illegal_insn;
4646 #else
4647 if (!supervisor(dc))
4648 goto priv_insn;
4649 if (gen_trap_ifnofpu(dc, cpu_cond))
4650 goto jmp_insn;
4651 goto nfq_insn;
4652 #endif
4653 #endif
4654 case 0x27: /* stdf, store double fpreg */
4655 {
4656 TCGv_i32 r_const;
4658 gen_op_load_fpr_DT0(DFPREG(rd));
4659 r_const = tcg_const_i32(dc->mem_idx);
4660 gen_helper_stdf(cpu_addr, r_const);
4661 tcg_temp_free_i32(r_const);
4662 }
4663 break;
4664 default:
4665 goto illegal_insn;
4666 }
4667 } else if (xop > 0x33 && xop < 0x3f) {
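/* Alternate-space FP stores plus the V9 compare-and-swap forms:
   casa/casxa compare the value at the address with rs2 and, on a
   match, store rd there; rd always receives the old memory value. */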
4668 save_state(dc, cpu_cond);
4669 switch (xop) {
4670 #ifdef TARGET_SPARC64
4671 case 0x34: /* V9 stfa */
4672 gen_stf_asi(cpu_addr, insn, 4, rd);
4673 break;
4674 case 0x36: /* V9 stqfa */
4675 {
4676 TCGv_i32 r_const;
4678 CHECK_FPU_FEATURE(dc, FLOAT128);
4679 r_const = tcg_const_i32(7);
4680 gen_helper_check_align(cpu_addr, r_const);
4681 tcg_temp_free_i32(r_const);
4682 gen_op_load_fpr_QT0(QFPREG(rd));
4683 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4684 }
4685 break;
4686 case 0x37: /* V9 stdfa */
4687 gen_op_load_fpr_DT0(DFPREG(rd));
4688 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4689 break;
4690 case 0x3c: /* V9 casa */
4691 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4692 gen_movl_TN_reg(rd, cpu_val);
4693 break;
4694 case 0x3e: /* V9 casxa */
4695 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4696 gen_movl_TN_reg(rd, cpu_val);
4697 break;
4698 #else
4699 case 0x34: /* stc */
4700 case 0x35: /* stcsr */
4701 case 0x36: /* stdcq */
4702 case 0x37: /* stdc */
4703 goto ncp_insn;
4704 #endif
4705 default:
4706 goto illegal_insn;
4707 }
4708 } else
4709 goto illegal_insn;
4710 }
4711 break;
4712 }
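/* SPARC maintains an architectural PC/NPC pair because of branch
   delay slots.  From here on, npc is either a concrete address,
   DYNAMIC_PC (the run-time value is already in cpu_npc), or JUMP_PC
   (an unresolved conditional branch choosing between jump_pc[0]
   and jump_pc[1]). */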
4713 /* default case for non-jump instructions */
4714 if (dc->npc == DYNAMIC_PC) {
4715 dc->pc = DYNAMIC_PC;
4716 gen_op_next_insn();
4717 } else if (dc->npc == JUMP_PC) {
4718 /* we can do a static jump */
4719 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4720 dc->is_br = 1;
4721 } else {
4722 dc->pc = dc->npc;
4723 dc->npc = dc->npc + 4;
4724 }
4725 jmp_insn:
4726 goto egress;
4727 illegal_insn:
4728 {
4729 TCGv_i32 r_const;
4731 save_state(dc, cpu_cond);
4732 r_const = tcg_const_i32(TT_ILL_INSN);
4733 gen_helper_raise_exception(r_const);
4734 tcg_temp_free_i32(r_const);
4735 dc->is_br = 1;
4736 }
4737 goto egress;
4738 unimp_flush:
4739 {
4740 TCGv_i32 r_const;
4742 save_state(dc, cpu_cond);
4743 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4744 gen_helper_raise_exception(r_const);
4745 tcg_temp_free_i32(r_const);
4746 dc->is_br = 1;
4747 }
4748 goto egress;
4749 #if !defined(CONFIG_USER_ONLY)
4750 priv_insn:
4751 {
4752 TCGv_i32 r_const;
4754 save_state(dc, cpu_cond);
4755 r_const = tcg_const_i32(TT_PRIV_INSN);
4756 gen_helper_raise_exception(r_const);
4757 tcg_temp_free_i32(r_const);
4758 dc->is_br = 1;
4759 }
4760 goto egress;
4761 #endif
4762 nfpu_insn:
4763 save_state(dc, cpu_cond);
4764 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4765 dc->is_br = 1;
4766 goto egress;
4767 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4768 nfq_insn:
4769 save_state(dc, cpu_cond);
4770 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4771 dc->is_br = 1;
4772 goto egress;
4773 #endif
4774 #ifndef TARGET_SPARC64
4775 ncp_insn:
4776 {
4777 TCGv_i32 r_const;
4779 save_state(dc, cpu_cond);
4780 r_const = tcg_const_i32(TT_NCP_INSN);
4781 gen_helper_raise_exception(r_const);
4782 tcg_temp_free_i32(r_const);
4783 dc->is_br = 1;
4784 }
4785 goto egress;
4786 #endif
4787 egress:
4788 tcg_temp_free(cpu_tmp1);
4789 tcg_temp_free(cpu_tmp2);
4790 }
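/* Translate one block of guest code.  A non-zero spc means we are
   regenerating a TB purely to recover the guest PC/NPC for a given
   host PC (see gen_intermediate_code_pc below). */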
4792 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4793 int spc, CPUSPARCState *env)
4794 {
4795 target_ulong pc_start, last_pc;
4796 uint16_t *gen_opc_end;
4797 DisasContext dc1, *dc = &dc1;
4798 CPUBreakpoint *bp;
4799 int j, lj = -1;
4800 int num_insns;
4801 int max_insns;
4803 memset(dc, 0, sizeof(DisasContext));
4804 dc->tb = tb;
4805 pc_start = tb->pc;
4806 dc->pc = pc_start;
4807 last_pc = dc->pc;
4808 dc->npc = (target_ulong) tb->cs_base;
4809 dc->cc_op = CC_OP_DYNAMIC;
4810 dc->mem_idx = cpu_mmu_index(env);
4811 dc->def = env->def;
4812 if ((dc->def->features & CPU_FEATURE_FLOAT))
4813 dc->fpu_enabled = cpu_fpu_enabled(env);
4814 else
4815 dc->fpu_enabled = 0;
4816 #ifdef TARGET_SPARC64
4817 dc->address_mask_32bit = env->pstate & PS_AM;
4818 #endif
4819 dc->singlestep = (env->singlestep_enabled || singlestep);
4820 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4822 cpu_tmp0 = tcg_temp_new();
4823 cpu_tmp32 = tcg_temp_new_i32();
4824 cpu_tmp64 = tcg_temp_new_i64();
4826 cpu_dst = tcg_temp_local_new();
4828 // loads and stores
4829 cpu_val = tcg_temp_local_new();
4830 cpu_addr = tcg_temp_local_new();
4832 num_insns = 0;
4833 max_insns = tb->cflags & CF_COUNT_MASK;
4834 if (max_insns == 0)
4835 max_insns = CF_COUNT_MASK;
4836 gen_icount_start();
4837 do {
4838 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4839 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4840 if (bp->pc == dc->pc) {
4841 if (dc->pc != pc_start)
4842 save_state(dc, cpu_cond);
4843 gen_helper_debug();
4844 tcg_gen_exit_tb(0);
4845 dc->is_br = 1;
4846 goto exit_gen_loop;
4847 }
4848 }
4849 }
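/* In search-PC mode, record the guest pc/npc and the instruction
   count for each op we generate, so gen_pc_load can later map a
   host PC back to guest state. */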
4850 if (spc) {
4851 qemu_log("Search PC...\n");
4852 j = gen_opc_ptr - gen_opc_buf;
4853 if (lj < j) {
4854 lj++;
4855 while (lj < j)
4856 gen_opc_instr_start[lj++] = 0;
4857 gen_opc_pc[lj] = dc->pc;
4858 gen_opc_npc[lj] = dc->npc;
4859 gen_opc_instr_start[lj] = 1;
4860 gen_opc_icount[lj] = num_insns;
4861 }
4862 }
4863 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4864 gen_io_start();
4865 last_pc = dc->pc;
4866 disas_sparc_insn(dc);
4867 num_insns++;
4869 if (dc->is_br)
4870 break;
4871 /* if the next PC is not the sequential successor, stop now */
4872 if (dc->pc != (last_pc + 4))
4873 break;
4874 /* if we reach a page boundary, we stop generation so that the
4875 PC of a TT_TFAULT exception is always in the right page */
4876 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4877 break;
4878 /* in single-step mode, we generate only one instruction and
4879 then generate an exception */
4880 if (dc->singlestep) {
4881 break;
4882 }
4883 } while ((gen_opc_ptr < gen_opc_end) &&
4884 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4885 num_insns < max_insns);
4887 exit_gen_loop:
4888 tcg_temp_free(cpu_addr);
4889 tcg_temp_free(cpu_val);
4890 tcg_temp_free(cpu_dst);
4891 tcg_temp_free_i64(cpu_tmp64);
4892 tcg_temp_free_i32(cpu_tmp32);
4893 tcg_temp_free(cpu_tmp0);
4894 if (tb->cflags & CF_LAST_IO)
4895 gen_io_end();
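/* If both PC and NPC are known statically we can chain directly to
   the successor TB; otherwise store what is known into cpu_pc and
   cpu_npc and return to the main loop. */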
4896 if (!dc->is_br) {
4897 if (dc->pc != DYNAMIC_PC &&
4898 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4899 /* static PC and NPC: we can use direct chaining */
4900 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4901 } else {
4902 if (dc->pc != DYNAMIC_PC)
4903 tcg_gen_movi_tl(cpu_pc, dc->pc);
4904 save_npc(dc, cpu_cond);
4905 tcg_gen_exit_tb(0);
4906 }
4907 }
4908 gen_icount_end(tb, num_insns);
4909 *gen_opc_ptr = INDEX_op_end;
4910 if (spc) {
4911 j = gen_opc_ptr - gen_opc_buf;
4912 lj++;
4913 while (lj <= j)
4914 gen_opc_instr_start[lj++] = 0;
4915 #if 0
4916 log_page_dump();
4917 #endif
4918 gen_opc_jump_pc[0] = dc->jump_pc[0];
4919 gen_opc_jump_pc[1] = dc->jump_pc[1];
4920 } else {
4921 tb->size = last_pc + 4 - pc_start;
4922 tb->icount = num_insns;
4923 }
4924 #ifdef DEBUG_DISAS
4925 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4926 qemu_log("--------------\n");
4927 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4928 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4929 qemu_log("\n");
4930 }
4931 #endif
4932 }
4934 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4935 {
4936 gen_intermediate_code_internal(tb, 0, env);
4937 }
4939 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4940 {
4941 gen_intermediate_code_internal(tb, 1, env);
4942 }
4944 void gen_intermediate_code_init(CPUSPARCState *env)
4945 {
4946 unsigned int i;
4947 static int inited;
4948 static const char * const gregnames[8] = {
4949 NULL, // g0 not used
4950 "g1",
4951 "g2",
4952 "g3",
4953 "g4",
4954 "g5",
4955 "g6",
4956 "g7",
4958 static const char * const fregnames[64] = {
4959 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4960 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4961 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4962 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4963 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4964 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4965 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4966 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4969 /* init various static tables */
4970 if (!inited) {
4971 inited = 1;
4973 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4974 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4975 offsetof(CPUState, regwptr),
4976 "regwptr");
4977 #ifdef TARGET_SPARC64
4978 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4979 "xcc");
4980 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4981 "asi");
4982 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4983 "fprs");
4984 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4985 "gsr");
4986 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4987 offsetof(CPUState, tick_cmpr),
4988 "tick_cmpr");
4989 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4990 offsetof(CPUState, stick_cmpr),
4991 "stick_cmpr");
4992 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4993 offsetof(CPUState, hstick_cmpr),
4994 "hstick_cmpr");
4995 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4996 "hintp");
4997 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4998 "htba");
4999 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5000 "hver");
5001 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5002 offsetof(CPUState, ssr), "ssr");
5003 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5004 offsetof(CPUState, version), "ver");
5005 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5006 offsetof(CPUState, softint),
5007 "softint");
5008 #else
5009 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5010 "wim");
5011 #endif
5012 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5013 "cond");
5014 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5015 "cc_src");
5016 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5017 offsetof(CPUState, cc_src2),
5018 "cc_src2");
5019 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5020 "cc_dst");
5021 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5022 "cc_op");
5023 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5024 "psr");
5025 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5026 "fsr");
5027 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5028 "pc");
5029 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5030 "npc");
5031 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5032 #ifndef CONFIG_USER_ONLY
5033 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5034 "tbr");
5035 #endif
5036 for (i = 1; i < 8; i++)
5037 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5038 offsetof(CPUState, gregs[i]),
5039 gregnames[i]);
5040 for (i = 0; i < TARGET_FPREGS; i++)
5041 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5042 offsetof(CPUState, fpr[i]),
5043 fregnames[i]);
5045 /* register helpers */
5047 #define GEN_HELPER 2
5048 #include "helper.h"
5052 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5053 unsigned long searched_pc, int pc_pos, void *puc)
5054 {
5055 target_ulong npc;
5056 env->pc = gen_opc_pc[pc_pos];
5057 npc = gen_opc_npc[pc_pos];
5058 if (npc == DYNAMIC_PC) {
5059 /* dynamic NPC: already stored */
5060 } else if (npc == JUMP_PC) {
5061 /* jump PC: use 'cond' and the jump targets of the translation */
5062 if (env->cond) {
5063 env->npc = gen_opc_jump_pc[0];
5064 } else {
5065 env->npc = gen_opc_jump_pc[1];
5066 }
5067 } else {
5068 env->npc = npc;
5069 }
5071 /* flush pending conditional evaluations before exposing cpu state */
5072 if (CC_OP != CC_OP_FLAGS) {
5073 helper_compute_psr();