MIPS: Initial support of VIA USB controller used by fulong mini pc
[qemu/ar7.git] / target-sparc / translate.c
blob23f95191adf37fecd0436aa3ab93017c08444337
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "exec-all.h"
29 #include "disas.h"
30 #include "helper.h"
31 #include "tcg-op.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
36 #define DEBUG_DISAS
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env, cpu_regwptr;
44 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45 static TCGv_i32 cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70 static target_ulong gen_opc_jump_pc[2];
72 #include "gen-icount.h"
74 typedef struct DisasContext {
75 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
76 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78 int is_br;
79 int mem_idx;
80 int fpu_enabled;
81 int address_mask_32bit;
82 int singlestep;
83 uint32_t cc_op; /* current CC operation */
84 struct TranslationBlock *tb;
85 sparc_def_t *def;
86 } DisasContext;
88 // This function uses non-native bit order
89 #define GET_FIELD(X, FROM, TO) \
90 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92 // This function uses the order in the manuals, i.e. bit 0 is 2^0
93 #define GET_FIELD_SP(X, FROM, TO) \
94 GET_FIELD(X, 31 - (TO), 31 - (FROM))
96 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
97 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
99 #ifdef TARGET_SPARC64
100 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
101 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
102 #else
103 #define DFPREG(r) (r & 0x1e)
104 #define QFPREG(r) (r & 0x1c)
105 #endif
107 #define UA2005_HTRAP_MASK 0xff
108 #define V8_TRAP_MASK 0x7f
110 static int sign_extend(int x, int len)
112 len = 32 - len;
113 return (x << len) >> len;
116 #define IS_IMM (insn & (1<<13))
118 /* floating point registers moves */
119 static void gen_op_load_fpr_DT0(unsigned int src)
121 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
122 offsetof(CPU_DoubleU, l.upper));
123 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
124 offsetof(CPU_DoubleU, l.lower));
127 static void gen_op_load_fpr_DT1(unsigned int src)
129 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
130 offsetof(CPU_DoubleU, l.upper));
131 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
132 offsetof(CPU_DoubleU, l.lower));
135 static void gen_op_store_DT0_fpr(unsigned int dst)
137 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
138 offsetof(CPU_DoubleU, l.upper));
139 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
140 offsetof(CPU_DoubleU, l.lower));
143 static void gen_op_load_fpr_QT0(unsigned int src)
145 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.upmost));
147 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.upper));
149 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
150 offsetof(CPU_QuadU, l.lower));
151 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
152 offsetof(CPU_QuadU, l.lowest));
155 static void gen_op_load_fpr_QT1(unsigned int src)
157 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.upmost));
159 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.upper));
161 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
162 offsetof(CPU_QuadU, l.lower));
163 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
164 offsetof(CPU_QuadU, l.lowest));
167 static void gen_op_store_QT0_fpr(unsigned int dst)
169 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.upmost));
171 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.upper));
173 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
174 offsetof(CPU_QuadU, l.lower));
175 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
176 offsetof(CPU_QuadU, l.lowest));
179 /* moves */
180 #ifdef CONFIG_USER_ONLY
181 #define supervisor(dc) 0
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) 0
184 #endif
185 #else
186 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
187 #ifdef TARGET_SPARC64
188 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
189 #else
190 #endif
191 #endif
193 #ifdef TARGET_SPARC64
194 #ifndef TARGET_ABI32
195 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
196 #else
197 #define AM_CHECK(dc) (1)
198 #endif
199 #endif
201 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
203 #ifdef TARGET_SPARC64
204 if (AM_CHECK(dc))
205 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
206 #endif
209 static inline void gen_movl_reg_TN(int reg, TCGv tn)
211 if (reg == 0)
212 tcg_gen_movi_tl(tn, 0);
213 else if (reg < 8)
214 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
215 else {
216 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
220 static inline void gen_movl_TN_reg(int reg, TCGv tn)
222 if (reg == 0)
223 return;
224 else if (reg < 8)
225 tcg_gen_mov_tl(cpu_gregs[reg], tn);
226 else {
227 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
231 static inline void gen_goto_tb(DisasContext *s, int tb_num,
232 target_ulong pc, target_ulong npc)
234 TranslationBlock *tb;
236 tb = s->tb;
237 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
238 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
239 !s->singlestep) {
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num);
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb((long)tb + tb_num);
245 } else {
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc, pc);
248 tcg_gen_movi_tl(cpu_npc, npc);
249 tcg_gen_exit_tb(0);
253 // XXX suboptimal
254 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
256 tcg_gen_extu_i32_tl(reg, src);
257 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
258 tcg_gen_andi_tl(reg, reg, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
263 tcg_gen_extu_i32_tl(reg, src);
264 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
265 tcg_gen_andi_tl(reg, reg, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
270 tcg_gen_extu_i32_tl(reg, src);
271 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
272 tcg_gen_andi_tl(reg, reg, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
277 tcg_gen_extu_i32_tl(reg, src);
278 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
279 tcg_gen_andi_tl(reg, reg, 0x1);
282 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
284 TCGv r_temp;
285 TCGv_i32 r_const;
286 int l1;
288 l1 = gen_new_label();
290 r_temp = tcg_temp_new();
291 tcg_gen_xor_tl(r_temp, src1, src2);
292 tcg_gen_not_tl(r_temp, r_temp);
293 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
294 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
295 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
296 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
297 r_const = tcg_const_i32(TT_TOVF);
298 gen_helper_raise_exception(r_const);
299 tcg_temp_free_i32(r_const);
300 gen_set_label(l1);
301 tcg_temp_free(r_temp);
304 static inline void gen_tag_tv(TCGv src1, TCGv src2)
306 int l1;
307 TCGv_i32 r_const;
309 l1 = gen_new_label();
310 tcg_gen_or_tl(cpu_tmp0, src1, src2);
311 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
312 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
313 r_const = tcg_const_i32(TT_TOVF);
314 gen_helper_raise_exception(r_const);
315 tcg_temp_free_i32(r_const);
316 gen_set_label(l1);
319 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
321 tcg_gen_mov_tl(cpu_cc_src, src1);
322 tcg_gen_movi_tl(cpu_cc_src2, src2);
323 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
324 tcg_gen_mov_tl(dst, cpu_cc_dst);
327 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
329 tcg_gen_mov_tl(cpu_cc_src, src1);
330 tcg_gen_mov_tl(cpu_cc_src2, src2);
331 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
332 tcg_gen_mov_tl(dst, cpu_cc_dst);
335 static TCGv_i32 gen_add32_carry32(void)
337 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
339 /* Carry is computed from a previous add: (dst < src) */
340 #if TARGET_LONG_BITS == 64
341 cc_src1_32 = tcg_temp_new_i32();
342 cc_src2_32 = tcg_temp_new_i32();
343 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
344 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
345 #else
346 cc_src1_32 = cpu_cc_dst;
347 cc_src2_32 = cpu_cc_src;
348 #endif
350 carry_32 = tcg_temp_new_i32();
351 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
353 #if TARGET_LONG_BITS == 64
354 tcg_temp_free_i32(cc_src1_32);
355 tcg_temp_free_i32(cc_src2_32);
356 #endif
358 return carry_32;
361 static TCGv_i32 gen_sub32_carry32(void)
363 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
365 /* Carry is computed from a previous borrow: (src1 < src2) */
366 #if TARGET_LONG_BITS == 64
367 cc_src1_32 = tcg_temp_new_i32();
368 cc_src2_32 = tcg_temp_new_i32();
369 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
370 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
371 #else
372 cc_src1_32 = cpu_cc_src;
373 cc_src2_32 = cpu_cc_src2;
374 #endif
376 carry_32 = tcg_temp_new_i32();
377 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
379 #if TARGET_LONG_BITS == 64
380 tcg_temp_free_i32(cc_src1_32);
381 tcg_temp_free_i32(cc_src2_32);
382 #endif
384 return carry_32;
387 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
388 TCGv src2, int update_cc)
390 TCGv_i32 carry_32;
391 TCGv carry;
393 switch (dc->cc_op) {
394 case CC_OP_DIV:
395 case CC_OP_LOGIC:
396 /* Carry is known to be zero. Fall back to plain ADD. */
397 if (update_cc) {
398 gen_op_add_cc(dst, src1, src2);
399 } else {
400 tcg_gen_add_tl(dst, src1, src2);
402 return;
404 case CC_OP_ADD:
405 case CC_OP_TADD:
406 case CC_OP_TADDTV:
407 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
409 /* For 32-bit hosts, we can re-use the host's hardware carry
410 generation by using an ADD2 opcode. We discard the low
411 part of the output. Ideally we'd combine this operation
412 with the add that generated the carry in the first place. */
413 TCGv dst_low = tcg_temp_new();
414 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
415 cpu_cc_src, src1, cpu_cc_src2, src2);
416 tcg_temp_free(dst_low);
417 goto add_done;
419 #endif
420 carry_32 = gen_add32_carry32();
421 break;
423 case CC_OP_SUB:
424 case CC_OP_TSUB:
425 case CC_OP_TSUBTV:
426 carry_32 = gen_sub32_carry32();
427 break;
429 default:
430 /* We need external help to produce the carry. */
431 carry_32 = tcg_temp_new_i32();
432 gen_helper_compute_C_icc(carry_32);
433 break;
436 #if TARGET_LONG_BITS == 64
437 carry = tcg_temp_new();
438 tcg_gen_extu_i32_i64(carry, carry_32);
439 #else
440 carry = carry_32;
441 #endif
443 tcg_gen_add_tl(dst, src1, src2);
444 tcg_gen_add_tl(dst, dst, carry);
446 tcg_temp_free_i32(carry_32);
447 #if TARGET_LONG_BITS == 64
448 tcg_temp_free(carry);
449 #endif
451 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
452 add_done:
453 #endif
454 if (update_cc) {
455 tcg_gen_mov_tl(cpu_cc_src, src1);
456 tcg_gen_mov_tl(cpu_cc_src2, src2);
457 tcg_gen_mov_tl(cpu_cc_dst, dst);
458 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
459 dc->cc_op = CC_OP_ADDX;
463 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
465 tcg_gen_mov_tl(cpu_cc_src, src1);
466 tcg_gen_mov_tl(cpu_cc_src2, src2);
467 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
468 tcg_gen_mov_tl(dst, cpu_cc_dst);
471 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
473 tcg_gen_mov_tl(cpu_cc_src, src1);
474 tcg_gen_mov_tl(cpu_cc_src2, src2);
475 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
476 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
478 tcg_gen_mov_tl(dst, cpu_cc_dst);
481 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
483 TCGv r_temp;
484 TCGv_i32 r_const;
485 int l1;
487 l1 = gen_new_label();
489 r_temp = tcg_temp_new();
490 tcg_gen_xor_tl(r_temp, src1, src2);
491 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
492 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
493 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
494 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
495 r_const = tcg_const_i32(TT_TOVF);
496 gen_helper_raise_exception(r_const);
497 tcg_temp_free_i32(r_const);
498 gen_set_label(l1);
499 tcg_temp_free(r_temp);
502 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
504 tcg_gen_mov_tl(cpu_cc_src, src1);
505 tcg_gen_movi_tl(cpu_cc_src2, src2);
506 if (src2 == 0) {
507 tcg_gen_mov_tl(cpu_cc_dst, src1);
508 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
509 dc->cc_op = CC_OP_LOGIC;
510 } else {
511 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
512 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
513 dc->cc_op = CC_OP_SUB;
515 tcg_gen_mov_tl(dst, cpu_cc_dst);
518 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
520 tcg_gen_mov_tl(cpu_cc_src, src1);
521 tcg_gen_mov_tl(cpu_cc_src2, src2);
522 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
523 tcg_gen_mov_tl(dst, cpu_cc_dst);
526 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
527 TCGv src2, int update_cc)
529 TCGv_i32 carry_32;
530 TCGv carry;
532 switch (dc->cc_op) {
533 case CC_OP_DIV:
534 case CC_OP_LOGIC:
535 /* Carry is known to be zero. Fall back to plain SUB. */
536 if (update_cc) {
537 gen_op_sub_cc(dst, src1, src2);
538 } else {
539 tcg_gen_sub_tl(dst, src1, src2);
541 return;
543 case CC_OP_ADD:
544 case CC_OP_TADD:
545 case CC_OP_TADDTV:
546 carry_32 = gen_add32_carry32();
547 break;
549 case CC_OP_SUB:
550 case CC_OP_TSUB:
551 case CC_OP_TSUBTV:
552 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
554 /* For 32-bit hosts, we can re-use the host's hardware carry
555 generation by using a SUB2 opcode. We discard the low
556 part of the output. Ideally we'd combine this operation
557 with the add that generated the carry in the first place. */
558 TCGv dst_low = tcg_temp_new();
559 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
560 cpu_cc_src, src1, cpu_cc_src2, src2);
561 tcg_temp_free(dst_low);
562 goto sub_done;
564 #endif
565 carry_32 = gen_sub32_carry32();
566 break;
568 default:
569 /* We need external help to produce the carry. */
570 carry_32 = tcg_temp_new_i32();
571 gen_helper_compute_C_icc(carry_32);
572 break;
575 #if TARGET_LONG_BITS == 64
576 carry = tcg_temp_new();
577 tcg_gen_extu_i32_i64(carry, carry_32);
578 #else
579 carry = carry_32;
580 #endif
582 tcg_gen_sub_tl(dst, src1, src2);
583 tcg_gen_sub_tl(dst, dst, carry);
585 tcg_temp_free_i32(carry_32);
586 #if TARGET_LONG_BITS == 64
587 tcg_temp_free(carry);
588 #endif
590 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
591 sub_done:
592 #endif
593 if (update_cc) {
594 tcg_gen_mov_tl(cpu_cc_src, src1);
595 tcg_gen_mov_tl(cpu_cc_src2, src2);
596 tcg_gen_mov_tl(cpu_cc_dst, dst);
597 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
598 dc->cc_op = CC_OP_SUBX;
602 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
604 tcg_gen_mov_tl(cpu_cc_src, src1);
605 tcg_gen_mov_tl(cpu_cc_src2, src2);
606 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
607 tcg_gen_mov_tl(dst, cpu_cc_dst);
610 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
612 tcg_gen_mov_tl(cpu_cc_src, src1);
613 tcg_gen_mov_tl(cpu_cc_src2, src2);
614 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
615 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
616 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
617 tcg_gen_mov_tl(dst, cpu_cc_dst);
620 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
622 TCGv r_temp;
623 int l1;
625 l1 = gen_new_label();
626 r_temp = tcg_temp_new();
628 /* old op:
629 if (!(env->y & 1))
630 T1 = 0;
632 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
633 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
634 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
635 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
636 tcg_gen_movi_tl(cpu_cc_src2, 0);
637 gen_set_label(l1);
639 // b2 = T0 & 1;
640 // env->y = (b2 << 31) | (env->y >> 1);
641 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
642 tcg_gen_shli_tl(r_temp, r_temp, 31);
643 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
644 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
645 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
646 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
648 // b1 = N ^ V;
649 gen_mov_reg_N(cpu_tmp0, cpu_psr);
650 gen_mov_reg_V(r_temp, cpu_psr);
651 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
652 tcg_temp_free(r_temp);
654 // T0 = (b1 << 31) | (T0 >> 1);
655 // src1 = T0;
656 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
657 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
658 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
660 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
662 tcg_gen_mov_tl(dst, cpu_cc_dst);
665 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
667 TCGv_i32 r_src1, r_src2;
668 TCGv_i64 r_temp, r_temp2;
670 r_src1 = tcg_temp_new_i32();
671 r_src2 = tcg_temp_new_i32();
673 tcg_gen_trunc_tl_i32(r_src1, src1);
674 tcg_gen_trunc_tl_i32(r_src2, src2);
676 r_temp = tcg_temp_new_i64();
677 r_temp2 = tcg_temp_new_i64();
679 if (sign_ext) {
680 tcg_gen_ext_i32_i64(r_temp, r_src2);
681 tcg_gen_ext_i32_i64(r_temp2, r_src1);
682 } else {
683 tcg_gen_extu_i32_i64(r_temp, r_src2);
684 tcg_gen_extu_i32_i64(r_temp2, r_src1);
687 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
689 tcg_gen_shri_i64(r_temp, r_temp2, 32);
690 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
691 tcg_temp_free_i64(r_temp);
692 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
694 tcg_gen_trunc_i64_tl(dst, r_temp2);
696 tcg_temp_free_i64(r_temp2);
698 tcg_temp_free_i32(r_src1);
699 tcg_temp_free_i32(r_src2);
702 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
704 /* zero-extend truncated operands before multiplication */
705 gen_op_multiply(dst, src1, src2, 0);
708 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
710 /* sign-extend truncated operands before multiplication */
711 gen_op_multiply(dst, src1, src2, 1);
714 #ifdef TARGET_SPARC64
715 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
717 TCGv_i32 r_const;
718 int l1;
720 l1 = gen_new_label();
721 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
722 r_const = tcg_const_i32(TT_DIV_ZERO);
723 gen_helper_raise_exception(r_const);
724 tcg_temp_free_i32(r_const);
725 gen_set_label(l1);
728 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
730 int l1, l2;
732 l1 = gen_new_label();
733 l2 = gen_new_label();
734 tcg_gen_mov_tl(cpu_cc_src, src1);
735 tcg_gen_mov_tl(cpu_cc_src2, src2);
736 gen_trap_ifdivzero_tl(cpu_cc_src2);
737 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
738 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
739 tcg_gen_movi_i64(dst, INT64_MIN);
740 tcg_gen_br(l2);
741 gen_set_label(l1);
742 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
743 gen_set_label(l2);
745 #endif
747 // 1
748 static inline void gen_op_eval_ba(TCGv dst)
750 tcg_gen_movi_tl(dst, 1);
753 // Z
754 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
756 gen_mov_reg_Z(dst, src);
759 // Z | (N ^ V)
760 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
762 gen_mov_reg_N(cpu_tmp0, src);
763 gen_mov_reg_V(dst, src);
764 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
765 gen_mov_reg_Z(cpu_tmp0, src);
766 tcg_gen_or_tl(dst, dst, cpu_tmp0);
769 // N ^ V
770 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
772 gen_mov_reg_V(cpu_tmp0, src);
773 gen_mov_reg_N(dst, src);
774 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
777 // C | Z
778 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
780 gen_mov_reg_Z(cpu_tmp0, src);
781 gen_mov_reg_C(dst, src);
782 tcg_gen_or_tl(dst, dst, cpu_tmp0);
785 // C
786 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
788 gen_mov_reg_C(dst, src);
791 // V
792 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
794 gen_mov_reg_V(dst, src);
797 // 0
798 static inline void gen_op_eval_bn(TCGv dst)
800 tcg_gen_movi_tl(dst, 0);
803 // N
804 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
806 gen_mov_reg_N(dst, src);
809 // !Z
810 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
812 gen_mov_reg_Z(dst, src);
813 tcg_gen_xori_tl(dst, dst, 0x1);
816 // !(Z | (N ^ V))
817 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
819 gen_mov_reg_N(cpu_tmp0, src);
820 gen_mov_reg_V(dst, src);
821 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
822 gen_mov_reg_Z(cpu_tmp0, src);
823 tcg_gen_or_tl(dst, dst, cpu_tmp0);
824 tcg_gen_xori_tl(dst, dst, 0x1);
827 // !(N ^ V)
828 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
830 gen_mov_reg_V(cpu_tmp0, src);
831 gen_mov_reg_N(dst, src);
832 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
833 tcg_gen_xori_tl(dst, dst, 0x1);
836 // !(C | Z)
837 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
839 gen_mov_reg_Z(cpu_tmp0, src);
840 gen_mov_reg_C(dst, src);
841 tcg_gen_or_tl(dst, dst, cpu_tmp0);
842 tcg_gen_xori_tl(dst, dst, 0x1);
845 // !C
846 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
848 gen_mov_reg_C(dst, src);
849 tcg_gen_xori_tl(dst, dst, 0x1);
852 // !N
853 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
855 gen_mov_reg_N(dst, src);
856 tcg_gen_xori_tl(dst, dst, 0x1);
859 // !V
860 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
862 gen_mov_reg_V(dst, src);
863 tcg_gen_xori_tl(dst, dst, 0x1);
867 FPSR bit field FCC1 | FCC0:
871 3 unordered
873 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
874 unsigned int fcc_offset)
876 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
877 tcg_gen_andi_tl(reg, reg, 0x1);
880 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
881 unsigned int fcc_offset)
883 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
884 tcg_gen_andi_tl(reg, reg, 0x1);
887 // !0: FCC0 | FCC1
888 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
889 unsigned int fcc_offset)
891 gen_mov_reg_FCC0(dst, src, fcc_offset);
892 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
893 tcg_gen_or_tl(dst, dst, cpu_tmp0);
896 // 1 or 2: FCC0 ^ FCC1
897 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
898 unsigned int fcc_offset)
900 gen_mov_reg_FCC0(dst, src, fcc_offset);
901 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
902 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
905 // 1 or 3: FCC0
906 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
907 unsigned int fcc_offset)
909 gen_mov_reg_FCC0(dst, src, fcc_offset);
912 // 1: FCC0 & !FCC1
913 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
914 unsigned int fcc_offset)
916 gen_mov_reg_FCC0(dst, src, fcc_offset);
917 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
918 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
919 tcg_gen_and_tl(dst, dst, cpu_tmp0);
922 // 2 or 3: FCC1
923 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
924 unsigned int fcc_offset)
926 gen_mov_reg_FCC1(dst, src, fcc_offset);
929 // 2: !FCC0 & FCC1
930 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
931 unsigned int fcc_offset)
933 gen_mov_reg_FCC0(dst, src, fcc_offset);
934 tcg_gen_xori_tl(dst, dst, 0x1);
935 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
936 tcg_gen_and_tl(dst, dst, cpu_tmp0);
939 // 3: FCC0 & FCC1
940 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
941 unsigned int fcc_offset)
943 gen_mov_reg_FCC0(dst, src, fcc_offset);
944 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
945 tcg_gen_and_tl(dst, dst, cpu_tmp0);
948 // 0: !(FCC0 | FCC1)
949 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
950 unsigned int fcc_offset)
952 gen_mov_reg_FCC0(dst, src, fcc_offset);
953 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
954 tcg_gen_or_tl(dst, dst, cpu_tmp0);
955 tcg_gen_xori_tl(dst, dst, 0x1);
958 // 0 or 3: !(FCC0 ^ FCC1)
959 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
960 unsigned int fcc_offset)
962 gen_mov_reg_FCC0(dst, src, fcc_offset);
963 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
964 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
965 tcg_gen_xori_tl(dst, dst, 0x1);
968 // 0 or 2: !FCC0
969 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
970 unsigned int fcc_offset)
972 gen_mov_reg_FCC0(dst, src, fcc_offset);
973 tcg_gen_xori_tl(dst, dst, 0x1);
976 // !1: !(FCC0 & !FCC1)
977 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
978 unsigned int fcc_offset)
980 gen_mov_reg_FCC0(dst, src, fcc_offset);
981 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
982 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
983 tcg_gen_and_tl(dst, dst, cpu_tmp0);
984 tcg_gen_xori_tl(dst, dst, 0x1);
987 // 0 or 1: !FCC1
988 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
989 unsigned int fcc_offset)
991 gen_mov_reg_FCC1(dst, src, fcc_offset);
992 tcg_gen_xori_tl(dst, dst, 0x1);
995 // !2: !(!FCC0 & FCC1)
996 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
997 unsigned int fcc_offset)
999 gen_mov_reg_FCC0(dst, src, fcc_offset);
1000 tcg_gen_xori_tl(dst, dst, 0x1);
1001 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1002 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1003 tcg_gen_xori_tl(dst, dst, 0x1);
1006 // !3: !(FCC0 & FCC1)
1007 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1008 unsigned int fcc_offset)
1010 gen_mov_reg_FCC0(dst, src, fcc_offset);
1011 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1012 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1013 tcg_gen_xori_tl(dst, dst, 0x1);
1016 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1017 target_ulong pc2, TCGv r_cond)
1019 int l1;
1021 l1 = gen_new_label();
1023 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1025 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1027 gen_set_label(l1);
1028 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1031 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1032 target_ulong pc2, TCGv r_cond)
1034 int l1;
1036 l1 = gen_new_label();
1038 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1040 gen_goto_tb(dc, 0, pc2, pc1);
1042 gen_set_label(l1);
1043 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1046 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1047 TCGv r_cond)
1049 int l1, l2;
1051 l1 = gen_new_label();
1052 l2 = gen_new_label();
1054 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1056 tcg_gen_movi_tl(cpu_npc, npc1);
1057 tcg_gen_br(l2);
1059 gen_set_label(l1);
1060 tcg_gen_movi_tl(cpu_npc, npc2);
1061 gen_set_label(l2);
1064 /* call this function before using the condition register as it may
1065 have been set for a jump */
1066 static inline void flush_cond(DisasContext *dc, TCGv cond)
1068 if (dc->npc == JUMP_PC) {
1069 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1070 dc->npc = DYNAMIC_PC;
1074 static inline void save_npc(DisasContext *dc, TCGv cond)
1076 if (dc->npc == JUMP_PC) {
1077 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1078 dc->npc = DYNAMIC_PC;
1079 } else if (dc->npc != DYNAMIC_PC) {
1080 tcg_gen_movi_tl(cpu_npc, dc->npc);
1084 static inline void save_state(DisasContext *dc, TCGv cond)
1086 tcg_gen_movi_tl(cpu_pc, dc->pc);
1087 /* flush pending conditional evaluations before exposing cpu state */
1088 if (dc->cc_op != CC_OP_FLAGS) {
1089 dc->cc_op = CC_OP_FLAGS;
1090 gen_helper_compute_psr();
1092 save_npc(dc, cond);
1095 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1097 if (dc->npc == JUMP_PC) {
1098 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1099 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1100 dc->pc = DYNAMIC_PC;
1101 } else if (dc->npc == DYNAMIC_PC) {
1102 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1103 dc->pc = DYNAMIC_PC;
1104 } else {
1105 dc->pc = dc->npc;
1109 static inline void gen_op_next_insn(void)
1111 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1112 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1115 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1116 DisasContext *dc)
1118 TCGv_i32 r_src;
1120 #ifdef TARGET_SPARC64
1121 if (cc)
1122 r_src = cpu_xcc;
1123 else
1124 r_src = cpu_psr;
1125 #else
1126 r_src = cpu_psr;
1127 #endif
1128 switch (dc->cc_op) {
1129 case CC_OP_FLAGS:
1130 break;
1131 default:
1132 gen_helper_compute_psr();
1133 dc->cc_op = CC_OP_FLAGS;
1134 break;
1136 switch (cond) {
1137 case 0x0:
1138 gen_op_eval_bn(r_dst);
1139 break;
1140 case 0x1:
1141 gen_op_eval_be(r_dst, r_src);
1142 break;
1143 case 0x2:
1144 gen_op_eval_ble(r_dst, r_src);
1145 break;
1146 case 0x3:
1147 gen_op_eval_bl(r_dst, r_src);
1148 break;
1149 case 0x4:
1150 gen_op_eval_bleu(r_dst, r_src);
1151 break;
1152 case 0x5:
1153 gen_op_eval_bcs(r_dst, r_src);
1154 break;
1155 case 0x6:
1156 gen_op_eval_bneg(r_dst, r_src);
1157 break;
1158 case 0x7:
1159 gen_op_eval_bvs(r_dst, r_src);
1160 break;
1161 case 0x8:
1162 gen_op_eval_ba(r_dst);
1163 break;
1164 case 0x9:
1165 gen_op_eval_bne(r_dst, r_src);
1166 break;
1167 case 0xa:
1168 gen_op_eval_bg(r_dst, r_src);
1169 break;
1170 case 0xb:
1171 gen_op_eval_bge(r_dst, r_src);
1172 break;
1173 case 0xc:
1174 gen_op_eval_bgu(r_dst, r_src);
1175 break;
1176 case 0xd:
1177 gen_op_eval_bcc(r_dst, r_src);
1178 break;
1179 case 0xe:
1180 gen_op_eval_bpos(r_dst, r_src);
1181 break;
1182 case 0xf:
1183 gen_op_eval_bvc(r_dst, r_src);
1184 break;
1188 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1190 unsigned int offset;
1192 switch (cc) {
1193 default:
1194 case 0x0:
1195 offset = 0;
1196 break;
1197 case 0x1:
1198 offset = 32 - 10;
1199 break;
1200 case 0x2:
1201 offset = 34 - 10;
1202 break;
1203 case 0x3:
1204 offset = 36 - 10;
1205 break;
1208 switch (cond) {
1209 case 0x0:
1210 gen_op_eval_bn(r_dst);
1211 break;
1212 case 0x1:
1213 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1214 break;
1215 case 0x2:
1216 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1217 break;
1218 case 0x3:
1219 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1220 break;
1221 case 0x4:
1222 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1223 break;
1224 case 0x5:
1225 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1226 break;
1227 case 0x6:
1228 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1229 break;
1230 case 0x7:
1231 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1232 break;
1233 case 0x8:
1234 gen_op_eval_ba(r_dst);
1235 break;
1236 case 0x9:
1237 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1238 break;
1239 case 0xa:
1240 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1241 break;
1242 case 0xb:
1243 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1244 break;
1245 case 0xc:
1246 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1247 break;
1248 case 0xd:
1249 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1250 break;
1251 case 0xe:
1252 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1253 break;
1254 case 0xf:
1255 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1256 break;
1260 #ifdef TARGET_SPARC64
1261 // Inverted logic
1262 static const int gen_tcg_cond_reg[8] = {
1264 TCG_COND_NE,
1265 TCG_COND_GT,
1266 TCG_COND_GE,
1268 TCG_COND_EQ,
1269 TCG_COND_LE,
1270 TCG_COND_LT,
1273 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1275 int l1;
1277 l1 = gen_new_label();
1278 tcg_gen_movi_tl(r_dst, 0);
1279 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1280 tcg_gen_movi_tl(r_dst, 1);
1281 gen_set_label(l1);
1283 #endif
1285 /* XXX: potentially incorrect if dynamic npc */
1286 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1287 TCGv r_cond)
1289 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1290 target_ulong target = dc->pc + offset;
1292 if (cond == 0x0) {
1293 /* unconditional not taken */
1294 if (a) {
1295 dc->pc = dc->npc + 4;
1296 dc->npc = dc->pc + 4;
1297 } else {
1298 dc->pc = dc->npc;
1299 dc->npc = dc->pc + 4;
1301 } else if (cond == 0x8) {
1302 /* unconditional taken */
1303 if (a) {
1304 dc->pc = target;
1305 dc->npc = dc->pc + 4;
1306 } else {
1307 dc->pc = dc->npc;
1308 dc->npc = target;
1309 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1311 } else {
1312 flush_cond(dc, r_cond);
1313 gen_cond(r_cond, cc, cond, dc);
1314 if (a) {
1315 gen_branch_a(dc, target, dc->npc, r_cond);
1316 dc->is_br = 1;
1317 } else {
1318 dc->pc = dc->npc;
1319 dc->jump_pc[0] = target;
1320 dc->jump_pc[1] = dc->npc + 4;
1321 dc->npc = JUMP_PC;
1326 /* XXX: potentially incorrect if dynamic npc */
1327 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1328 TCGv r_cond)
1330 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1331 target_ulong target = dc->pc + offset;
1333 if (cond == 0x0) {
1334 /* unconditional not taken */
1335 if (a) {
1336 dc->pc = dc->npc + 4;
1337 dc->npc = dc->pc + 4;
1338 } else {
1339 dc->pc = dc->npc;
1340 dc->npc = dc->pc + 4;
1342 } else if (cond == 0x8) {
1343 /* unconditional taken */
1344 if (a) {
1345 dc->pc = target;
1346 dc->npc = dc->pc + 4;
1347 } else {
1348 dc->pc = dc->npc;
1349 dc->npc = target;
1350 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1352 } else {
1353 flush_cond(dc, r_cond);
1354 gen_fcond(r_cond, cc, cond);
1355 if (a) {
1356 gen_branch_a(dc, target, dc->npc, r_cond);
1357 dc->is_br = 1;
1358 } else {
1359 dc->pc = dc->npc;
1360 dc->jump_pc[0] = target;
1361 dc->jump_pc[1] = dc->npc + 4;
1362 dc->npc = JUMP_PC;
1367 #ifdef TARGET_SPARC64
1368 /* XXX: potentially incorrect if dynamic npc */
1369 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1370 TCGv r_cond, TCGv r_reg)
1372 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1373 target_ulong target = dc->pc + offset;
1375 flush_cond(dc, r_cond);
1376 gen_cond_reg(r_cond, cond, r_reg);
1377 if (a) {
1378 gen_branch_a(dc, target, dc->npc, r_cond);
1379 dc->is_br = 1;
1380 } else {
1381 dc->pc = dc->npc;
1382 dc->jump_pc[0] = target;
1383 dc->jump_pc[1] = dc->npc + 4;
1384 dc->npc = JUMP_PC;
1388 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1390 switch (fccno) {
1391 case 0:
1392 gen_helper_fcmps(r_rs1, r_rs2);
1393 break;
1394 case 1:
1395 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1396 break;
1397 case 2:
1398 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1399 break;
1400 case 3:
1401 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1402 break;
1406 static inline void gen_op_fcmpd(int fccno)
1408 switch (fccno) {
1409 case 0:
1410 gen_helper_fcmpd();
1411 break;
1412 case 1:
1413 gen_helper_fcmpd_fcc1();
1414 break;
1415 case 2:
1416 gen_helper_fcmpd_fcc2();
1417 break;
1418 case 3:
1419 gen_helper_fcmpd_fcc3();
1420 break;
1424 static inline void gen_op_fcmpq(int fccno)
1426 switch (fccno) {
1427 case 0:
1428 gen_helper_fcmpq();
1429 break;
1430 case 1:
1431 gen_helper_fcmpq_fcc1();
1432 break;
1433 case 2:
1434 gen_helper_fcmpq_fcc2();
1435 break;
1436 case 3:
1437 gen_helper_fcmpq_fcc3();
1438 break;
1442 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1444 switch (fccno) {
1445 case 0:
1446 gen_helper_fcmpes(r_rs1, r_rs2);
1447 break;
1448 case 1:
1449 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1450 break;
1451 case 2:
1452 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1453 break;
1454 case 3:
1455 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1456 break;
1460 static inline void gen_op_fcmped(int fccno)
1462 switch (fccno) {
1463 case 0:
1464 gen_helper_fcmped();
1465 break;
1466 case 1:
1467 gen_helper_fcmped_fcc1();
1468 break;
1469 case 2:
1470 gen_helper_fcmped_fcc2();
1471 break;
1472 case 3:
1473 gen_helper_fcmped_fcc3();
1474 break;
1478 static inline void gen_op_fcmpeq(int fccno)
1480 switch (fccno) {
1481 case 0:
1482 gen_helper_fcmpeq();
1483 break;
1484 case 1:
1485 gen_helper_fcmpeq_fcc1();
1486 break;
1487 case 2:
1488 gen_helper_fcmpeq_fcc2();
1489 break;
1490 case 3:
1491 gen_helper_fcmpeq_fcc3();
1492 break;
1496 #else
1498 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1500 gen_helper_fcmps(r_rs1, r_rs2);
1503 static inline void gen_op_fcmpd(int fccno)
1505 gen_helper_fcmpd();
1508 static inline void gen_op_fcmpq(int fccno)
1510 gen_helper_fcmpq();
1513 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1515 gen_helper_fcmpes(r_rs1, r_rs2);
1518 static inline void gen_op_fcmped(int fccno)
1520 gen_helper_fcmped();
1523 static inline void gen_op_fcmpeq(int fccno)
1525 gen_helper_fcmpeq();
1527 #endif
1529 static inline void gen_op_fpexception_im(int fsr_flags)
1531 TCGv_i32 r_const;
1533 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1534 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1535 r_const = tcg_const_i32(TT_FP_EXCP);
1536 gen_helper_raise_exception(r_const);
1537 tcg_temp_free_i32(r_const);
1540 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1542 #if !defined(CONFIG_USER_ONLY)
1543 if (!dc->fpu_enabled) {
1544 TCGv_i32 r_const;
1546 save_state(dc, r_cond);
1547 r_const = tcg_const_i32(TT_NFPU_INSN);
1548 gen_helper_raise_exception(r_const);
1549 tcg_temp_free_i32(r_const);
1550 dc->is_br = 1;
1551 return 1;
1553 #endif
1554 return 0;
1557 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1559 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1562 static inline void gen_clear_float_exceptions(void)
1564 gen_helper_clear_float_exceptions();
1567 /* asi moves */
1568 #ifdef TARGET_SPARC64
1569 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1571 int asi;
1572 TCGv_i32 r_asi;
1574 if (IS_IMM) {
1575 r_asi = tcg_temp_new_i32();
1576 tcg_gen_mov_i32(r_asi, cpu_asi);
1577 } else {
1578 asi = GET_FIELD(insn, 19, 26);
1579 r_asi = tcg_const_i32(asi);
1581 return r_asi;
1584 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1585 int sign)
1587 TCGv_i32 r_asi, r_size, r_sign;
1589 r_asi = gen_get_asi(insn, addr);
1590 r_size = tcg_const_i32(size);
1591 r_sign = tcg_const_i32(sign);
1592 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1593 tcg_temp_free_i32(r_sign);
1594 tcg_temp_free_i32(r_size);
1595 tcg_temp_free_i32(r_asi);
1598 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1600 TCGv_i32 r_asi, r_size;
1602 r_asi = gen_get_asi(insn, addr);
1603 r_size = tcg_const_i32(size);
1604 gen_helper_st_asi(addr, src, r_asi, r_size);
1605 tcg_temp_free_i32(r_size);
1606 tcg_temp_free_i32(r_asi);
1609 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1611 TCGv_i32 r_asi, r_size, r_rd;
1613 r_asi = gen_get_asi(insn, addr);
1614 r_size = tcg_const_i32(size);
1615 r_rd = tcg_const_i32(rd);
1616 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1617 tcg_temp_free_i32(r_rd);
1618 tcg_temp_free_i32(r_size);
1619 tcg_temp_free_i32(r_asi);
1622 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1624 TCGv_i32 r_asi, r_size, r_rd;
1626 r_asi = gen_get_asi(insn, addr);
1627 r_size = tcg_const_i32(size);
1628 r_rd = tcg_const_i32(rd);
1629 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1630 tcg_temp_free_i32(r_rd);
1631 tcg_temp_free_i32(r_size);
1632 tcg_temp_free_i32(r_asi);
1635 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1637 TCGv_i32 r_asi, r_size, r_sign;
1639 r_asi = gen_get_asi(insn, addr);
1640 r_size = tcg_const_i32(4);
1641 r_sign = tcg_const_i32(0);
1642 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1643 tcg_temp_free_i32(r_sign);
1644 gen_helper_st_asi(addr, dst, r_asi, r_size);
1645 tcg_temp_free_i32(r_size);
1646 tcg_temp_free_i32(r_asi);
1647 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1650 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1652 TCGv_i32 r_asi, r_rd;
1654 r_asi = gen_get_asi(insn, addr);
1655 r_rd = tcg_const_i32(rd);
1656 gen_helper_ldda_asi(addr, r_asi, r_rd);
1657 tcg_temp_free_i32(r_rd);
1658 tcg_temp_free_i32(r_asi);
1661 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1663 TCGv_i32 r_asi, r_size;
1665 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1666 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1667 r_asi = gen_get_asi(insn, addr);
1668 r_size = tcg_const_i32(8);
1669 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1670 tcg_temp_free_i32(r_size);
1671 tcg_temp_free_i32(r_asi);
1674 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1675 int rd)
1677 TCGv r_val1;
1678 TCGv_i32 r_asi;
1680 r_val1 = tcg_temp_new();
1681 gen_movl_reg_TN(rd, r_val1);
1682 r_asi = gen_get_asi(insn, addr);
1683 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1684 tcg_temp_free_i32(r_asi);
1685 tcg_temp_free(r_val1);
1688 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1689 int rd)
1691 TCGv_i32 r_asi;
1693 gen_movl_reg_TN(rd, cpu_tmp64);
1694 r_asi = gen_get_asi(insn, addr);
1695 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1696 tcg_temp_free_i32(r_asi);
1699 #elif !defined(CONFIG_USER_ONLY)
1701 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1702 int sign)
1704 TCGv_i32 r_asi, r_size, r_sign;
1706 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1707 r_size = tcg_const_i32(size);
1708 r_sign = tcg_const_i32(sign);
1709 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1710 tcg_temp_free(r_sign);
1711 tcg_temp_free(r_size);
1712 tcg_temp_free(r_asi);
1713 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1716 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1718 TCGv_i32 r_asi, r_size;
1720 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1721 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1722 r_size = tcg_const_i32(size);
1723 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1724 tcg_temp_free(r_size);
1725 tcg_temp_free(r_asi);
1728 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1730 TCGv_i32 r_asi, r_size, r_sign;
1731 TCGv_i64 r_val;
1733 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1734 r_size = tcg_const_i32(4);
1735 r_sign = tcg_const_i32(0);
1736 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1737 tcg_temp_free(r_sign);
1738 r_val = tcg_temp_new_i64();
1739 tcg_gen_extu_tl_i64(r_val, dst);
1740 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1741 tcg_temp_free_i64(r_val);
1742 tcg_temp_free(r_size);
1743 tcg_temp_free(r_asi);
1744 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1747 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1749 TCGv_i32 r_asi, r_size, r_sign;
1751 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1752 r_size = tcg_const_i32(8);
1753 r_sign = tcg_const_i32(0);
1754 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1755 tcg_temp_free(r_sign);
1756 tcg_temp_free(r_size);
1757 tcg_temp_free(r_asi);
1758 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1759 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1760 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1761 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1762 gen_movl_TN_reg(rd, hi);
1765 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1767 TCGv_i32 r_asi, r_size;
1769 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1770 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1771 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1772 r_size = tcg_const_i32(8);
1773 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1774 tcg_temp_free(r_size);
1775 tcg_temp_free(r_asi);
1777 #endif
1779 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1780 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1782 TCGv_i64 r_val;
1783 TCGv_i32 r_asi, r_size;
1785 gen_ld_asi(dst, addr, insn, 1, 0);
1787 r_val = tcg_const_i64(0xffULL);
1788 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1789 r_size = tcg_const_i32(1);
1790 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1791 tcg_temp_free_i32(r_size);
1792 tcg_temp_free_i32(r_asi);
1793 tcg_temp_free_i64(r_val);
1795 #endif
1797 static inline TCGv get_src1(unsigned int insn, TCGv def)
1799 TCGv r_rs1 = def;
1800 unsigned int rs1;
1802 rs1 = GET_FIELD(insn, 13, 17);
1803 if (rs1 == 0) {
1804 tcg_gen_movi_tl(def, 0);
1805 } else if (rs1 < 8) {
1806 r_rs1 = cpu_gregs[rs1];
1807 } else {
1808 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1810 return r_rs1;
1813 static inline TCGv get_src2(unsigned int insn, TCGv def)
1815 TCGv r_rs2 = def;
1817 if (IS_IMM) { /* immediate */
1818 target_long simm = GET_FIELDs(insn, 19, 31);
1819 tcg_gen_movi_tl(def, simm);
1820 } else { /* register */
1821 unsigned int rs2 = GET_FIELD(insn, 27, 31);
1822 if (rs2 == 0) {
1823 tcg_gen_movi_tl(def, 0);
1824 } else if (rs2 < 8) {
1825 r_rs2 = cpu_gregs[rs2];
1826 } else {
1827 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1830 return r_rs2;
1833 #ifdef TARGET_SPARC64
1834 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1836 TCGv_i32 r_tl = tcg_temp_new_i32();
1838 /* load env->tl into r_tl */
1839 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1841 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1842 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1844 /* calculate offset to current trap state from env->ts, reuse r_tl */
1845 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1846 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1848 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1850 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1851 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1852 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1853 tcg_temp_free_ptr(r_tl_tmp);
1856 tcg_temp_free_i32(r_tl);
1858 #endif
1860 #define CHECK_IU_FEATURE(dc, FEATURE) \
1861 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1862 goto illegal_insn;
1863 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1864 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1865 goto nfpu_insn;
1867 /* before an instruction, dc->pc must be static */
1868 static void disas_sparc_insn(DisasContext * dc)
1870 unsigned int insn, opc, rs1, rs2, rd;
1871 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1872 target_long simm;
1874 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1875 tcg_gen_debug_insn_start(dc->pc);
1876 insn = ldl_code(dc->pc);
1877 opc = GET_FIELD(insn, 0, 1);
1879 rd = GET_FIELD(insn, 2, 6);
1881 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1882 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1884 switch (opc) {
1885 case 0: /* branches/sethi */
1887 unsigned int xop = GET_FIELD(insn, 7, 9);
1888 int32_t target;
1889 switch (xop) {
1890 #ifdef TARGET_SPARC64
1891 case 0x1: /* V9 BPcc */
1893 int cc;
1895 target = GET_FIELD_SP(insn, 0, 18);
1896 target = sign_extend(target, 18);
1897 target <<= 2;
1898 cc = GET_FIELD_SP(insn, 20, 21);
1899 if (cc == 0)
1900 do_branch(dc, target, insn, 0, cpu_cond);
1901 else if (cc == 2)
1902 do_branch(dc, target, insn, 1, cpu_cond);
1903 else
1904 goto illegal_insn;
1905 goto jmp_insn;
1907 case 0x3: /* V9 BPr */
1909 target = GET_FIELD_SP(insn, 0, 13) |
1910 (GET_FIELD_SP(insn, 20, 21) << 14);
1911 target = sign_extend(target, 16);
1912 target <<= 2;
1913 cpu_src1 = get_src1(insn, cpu_src1);
1914 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1915 goto jmp_insn;
1917 case 0x5: /* V9 FBPcc */
1919 int cc = GET_FIELD_SP(insn, 20, 21);
1920 if (gen_trap_ifnofpu(dc, cpu_cond))
1921 goto jmp_insn;
1922 target = GET_FIELD_SP(insn, 0, 18);
1923 target = sign_extend(target, 19);
1924 target <<= 2;
1925 do_fbranch(dc, target, insn, cc, cpu_cond);
1926 goto jmp_insn;
1928 #else
1929 case 0x7: /* CBN+x */
1931 goto ncp_insn;
1933 #endif
1934 case 0x2: /* BN+x */
1936 target = GET_FIELD(insn, 10, 31);
1937 target = sign_extend(target, 22);
1938 target <<= 2;
1939 do_branch(dc, target, insn, 0, cpu_cond);
1940 goto jmp_insn;
1942 case 0x6: /* FBN+x */
1944 if (gen_trap_ifnofpu(dc, cpu_cond))
1945 goto jmp_insn;
1946 target = GET_FIELD(insn, 10, 31);
1947 target = sign_extend(target, 22);
1948 target <<= 2;
1949 do_fbranch(dc, target, insn, 0, cpu_cond);
1950 goto jmp_insn;
1952 case 0x4: /* SETHI */
1953 if (rd) { // nop
1954 uint32_t value = GET_FIELD(insn, 10, 31);
1955 TCGv r_const;
1957 r_const = tcg_const_tl(value << 10);
1958 gen_movl_TN_reg(rd, r_const);
1959 tcg_temp_free(r_const);
1961 break;
1962 case 0x0: /* UNIMPL */
1963 default:
1964 goto illegal_insn;
1966 break;
1968 break;
1969 case 1: /*CALL*/
1971 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1972 TCGv r_const;
1974 r_const = tcg_const_tl(dc->pc);
1975 gen_movl_TN_reg(15, r_const);
1976 tcg_temp_free(r_const);
1977 target += dc->pc;
1978 gen_mov_pc_npc(dc, cpu_cond);
1979 dc->npc = target;
1981 goto jmp_insn;
1982 case 2: /* FPU & Logical Operations */
1984 unsigned int xop = GET_FIELD(insn, 7, 12);
1985 if (xop == 0x3a) { /* generate trap */
1986 int cond;
1988 cpu_src1 = get_src1(insn, cpu_src1);
1989 if (IS_IMM) {
1990 rs2 = GET_FIELD(insn, 25, 31);
1991 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1992 } else {
1993 rs2 = GET_FIELD(insn, 27, 31);
1994 if (rs2 != 0) {
1995 gen_movl_reg_TN(rs2, cpu_src2);
1996 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1997 } else
1998 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2000 cond = GET_FIELD(insn, 3, 6);
2001 if (cond == 0x8) {
2002 save_state(dc, cpu_cond);
2003 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2004 supervisor(dc))
2005 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2006 else
2007 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2008 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2009 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2010 gen_helper_raise_exception(cpu_tmp32);
2011 } else if (cond != 0) {
2012 TCGv r_cond = tcg_temp_new();
2013 int l1;
2014 #ifdef TARGET_SPARC64
2015 /* V9 icc/xcc */
2016 int cc = GET_FIELD_SP(insn, 11, 12);
2018 save_state(dc, cpu_cond);
2019 if (cc == 0)
2020 gen_cond(r_cond, 0, cond, dc);
2021 else if (cc == 2)
2022 gen_cond(r_cond, 1, cond, dc);
2023 else
2024 goto illegal_insn;
2025 #else
2026 save_state(dc, cpu_cond);
2027 gen_cond(r_cond, 0, cond, dc);
2028 #endif
2029 l1 = gen_new_label();
2030 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2032 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2033 supervisor(dc))
2034 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2035 else
2036 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2037 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2038 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2039 gen_helper_raise_exception(cpu_tmp32);
2041 gen_set_label(l1);
2042 tcg_temp_free(r_cond);
2044 gen_op_next_insn();
2045 tcg_gen_exit_tb(0);
2046 dc->is_br = 1;
2047 goto jmp_insn;
2048 } else if (xop == 0x28) {
2049 rs1 = GET_FIELD(insn, 13, 17);
2050 switch(rs1) {
2051 case 0: /* rdy */
2052 #ifndef TARGET_SPARC64
2053 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2054 manual, rdy on the microSPARC
2055 II */
2056 case 0x0f: /* stbar in the SPARCv8 manual,
2057 rdy on the microSPARC II */
2058 case 0x10 ... 0x1f: /* implementation-dependent in the
2059 SPARCv8 manual, rdy on the
2060 microSPARC II */
2061 #endif
2062 gen_movl_TN_reg(rd, cpu_y);
2063 break;
2064 #ifdef TARGET_SPARC64
2065 case 0x2: /* V9 rdccr */
2066 gen_helper_compute_psr();
2067 gen_helper_rdccr(cpu_dst);
2068 gen_movl_TN_reg(rd, cpu_dst);
2069 break;
2070 case 0x3: /* V9 rdasi */
2071 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2072 gen_movl_TN_reg(rd, cpu_dst);
2073 break;
2074 case 0x4: /* V9 rdtick */
2076 TCGv_ptr r_tickptr;
2078 r_tickptr = tcg_temp_new_ptr();
2079 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2080 offsetof(CPUState, tick));
2081 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2082 tcg_temp_free_ptr(r_tickptr);
2083 gen_movl_TN_reg(rd, cpu_dst);
2085 break;
2086 case 0x5: /* V9 rdpc */
2088 TCGv r_const;
2090 r_const = tcg_const_tl(dc->pc);
2091 gen_movl_TN_reg(rd, r_const);
2092 tcg_temp_free(r_const);
2094 break;
2095 case 0x6: /* V9 rdfprs */
2096 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2097 gen_movl_TN_reg(rd, cpu_dst);
2098 break;
2099 case 0xf: /* V9 membar */
2100 break; /* no effect */
2101 case 0x13: /* Graphics Status */
2102 if (gen_trap_ifnofpu(dc, cpu_cond))
2103 goto jmp_insn;
2104 gen_movl_TN_reg(rd, cpu_gsr);
2105 break;
2106 case 0x16: /* Softint */
2107 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2108 gen_movl_TN_reg(rd, cpu_dst);
2109 break;
2110 case 0x17: /* Tick compare */
2111 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2112 break;
2113 case 0x18: /* System tick */
2115 TCGv_ptr r_tickptr;
2117 r_tickptr = tcg_temp_new_ptr();
2118 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2119 offsetof(CPUState, stick));
2120 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2121 tcg_temp_free_ptr(r_tickptr);
2122 gen_movl_TN_reg(rd, cpu_dst);
2124 break;
2125 case 0x19: /* System tick compare */
2126 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2127 break;
2128 case 0x10: /* Performance Control */
2129 case 0x11: /* Performance Instrumentation Counter */
2130 case 0x12: /* Dispatch Control */
2131 case 0x14: /* Softint set, WO */
2132 case 0x15: /* Softint clear, WO */
2133 #endif
2134 default:
2135 goto illegal_insn;
2137 #if !defined(CONFIG_USER_ONLY)
2138 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2139 #ifndef TARGET_SPARC64
2140 if (!supervisor(dc))
2141 goto priv_insn;
2142 gen_helper_compute_psr();
2143 dc->cc_op = CC_OP_FLAGS;
2144 gen_helper_rdpsr(cpu_dst);
2145 #else
2146 CHECK_IU_FEATURE(dc, HYPV);
2147 if (!hypervisor(dc))
2148 goto priv_insn;
2149 rs1 = GET_FIELD(insn, 13, 17);
2150 switch (rs1) {
2151 case 0: // hpstate
2152 // gen_op_rdhpstate();
2153 break;
2154 case 1: // htstate
2155 // gen_op_rdhtstate();
2156 break;
2157 case 3: // hintp
2158 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2159 break;
2160 case 5: // htba
2161 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2162 break;
2163 case 6: // hver
2164 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2165 break;
2166 case 31: // hstick_cmpr
2167 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2168 break;
2169 default:
2170 goto illegal_insn;
2172 #endif
2173 gen_movl_TN_reg(rd, cpu_dst);
2174 break;
2175 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2176 if (!supervisor(dc))
2177 goto priv_insn;
2178 #ifdef TARGET_SPARC64
2179 rs1 = GET_FIELD(insn, 13, 17);
2180 switch (rs1) {
2181 case 0: // tpc
2183 TCGv_ptr r_tsptr;
2185 r_tsptr = tcg_temp_new_ptr();
2186 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2187 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2188 offsetof(trap_state, tpc));
2189 tcg_temp_free_ptr(r_tsptr);
2191 break;
2192 case 1: // tnpc
2194 TCGv_ptr r_tsptr;
2196 r_tsptr = tcg_temp_new_ptr();
2197 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2198 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2199 offsetof(trap_state, tnpc));
2200 tcg_temp_free_ptr(r_tsptr);
2202 break;
2203 case 2: // tstate
2205 TCGv_ptr r_tsptr;
2207 r_tsptr = tcg_temp_new_ptr();
2208 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2209 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2210 offsetof(trap_state, tstate));
2211 tcg_temp_free_ptr(r_tsptr);
2213 break;
2214 case 3: // tt
2216 TCGv_ptr r_tsptr;
2218 r_tsptr = tcg_temp_new_ptr();
2219 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2220 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2221 offsetof(trap_state, tt));
2222 tcg_temp_free_ptr(r_tsptr);
2223 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2225 break;
2226 case 4: // tick
2228 TCGv_ptr r_tickptr;
2230 r_tickptr = tcg_temp_new_ptr();
2231 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2232 offsetof(CPUState, tick));
2233 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2234 gen_movl_TN_reg(rd, cpu_tmp0);
2235 tcg_temp_free_ptr(r_tickptr);
2237 break;
2238 case 5: // tba
2239 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2240 break;
2241 case 6: // pstate
2242 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2243 offsetof(CPUSPARCState, pstate));
2244 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2245 break;
2246 case 7: // tl
2247 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2248 offsetof(CPUSPARCState, tl));
2249 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2250 break;
2251 case 8: // pil
2252 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2253 offsetof(CPUSPARCState, psrpil));
2254 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2255 break;
2256 case 9: // cwp
2257 gen_helper_rdcwp(cpu_tmp0);
2258 break;
2259 case 10: // cansave
2260 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2261 offsetof(CPUSPARCState, cansave));
2262 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2263 break;
2264 case 11: // canrestore
2265 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2266 offsetof(CPUSPARCState, canrestore));
2267 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2268 break;
2269 case 12: // cleanwin
2270 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2271 offsetof(CPUSPARCState, cleanwin));
2272 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2273 break;
2274 case 13: // otherwin
2275 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2276 offsetof(CPUSPARCState, otherwin));
2277 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2278 break;
2279 case 14: // wstate
2280 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2281 offsetof(CPUSPARCState, wstate));
2282 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2283 break;
2284 case 16: // UA2005 gl
2285 CHECK_IU_FEATURE(dc, GL);
2286 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2287 offsetof(CPUSPARCState, gl));
2288 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2289 break;
2290 case 26: // UA2005 strand status
2291 CHECK_IU_FEATURE(dc, HYPV);
2292 if (!hypervisor(dc))
2293 goto priv_insn;
2294 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2295 break;
2296 case 31: // ver
2297 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2298 break;
2299 case 15: // fq
2300 default:
2301 goto illegal_insn;
2303 #else
2304 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2305 #endif
2306 gen_movl_TN_reg(rd, cpu_tmp0);
2307 break;
2308 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2309 #ifdef TARGET_SPARC64
2310 save_state(dc, cpu_cond);
2311 gen_helper_flushw();
2312 #else
2313 if (!supervisor(dc))
2314 goto priv_insn;
2315 gen_movl_TN_reg(rd, cpu_tbr);
2316 #endif
2317 break;
2318 #endif
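/* FPop1: single-precision values live directly in the 32-bit
   cpu_fpr[] globals, while double and quad operations are staged
   through the DT0/DT1 and QT0/QT1 helper registers instead of being
   passed as TCG operands. */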
2319 } else if (xop == 0x34) { /* FPU Operations */
2320 if (gen_trap_ifnofpu(dc, cpu_cond))
2321 goto jmp_insn;
2322 gen_op_clear_ieee_excp_and_FTT();
2323 rs1 = GET_FIELD(insn, 13, 17);
2324 rs2 = GET_FIELD(insn, 27, 31);
2325 xop = GET_FIELD(insn, 18, 26);
2326 save_state(dc, cpu_cond);
2327 switch (xop) {
2328 case 0x1: /* fmovs */
2329 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2330 break;
2331 case 0x5: /* fnegs */
2332 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2333 break;
2334 case 0x9: /* fabss */
2335 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2336 break;
2337 case 0x29: /* fsqrts */
2338 CHECK_FPU_FEATURE(dc, FSQRT);
2339 gen_clear_float_exceptions();
2340 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2341 gen_helper_check_ieee_exceptions();
2342 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2343 break;
2344 case 0x2a: /* fsqrtd */
2345 CHECK_FPU_FEATURE(dc, FSQRT);
2346 gen_op_load_fpr_DT1(DFPREG(rs2));
2347 gen_clear_float_exceptions();
2348 gen_helper_fsqrtd();
2349 gen_helper_check_ieee_exceptions();
2350 gen_op_store_DT0_fpr(DFPREG(rd));
2351 break;
2352 case 0x2b: /* fsqrtq */
2353 CHECK_FPU_FEATURE(dc, FLOAT128);
2354 gen_op_load_fpr_QT1(QFPREG(rs2));
2355 gen_clear_float_exceptions();
2356 gen_helper_fsqrtq();
2357 gen_helper_check_ieee_exceptions();
2358 gen_op_store_QT0_fpr(QFPREG(rd));
2359 break;
2360 case 0x41: /* fadds */
2361 gen_clear_float_exceptions();
2362 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2363 gen_helper_check_ieee_exceptions();
2364 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2365 break;
2366 case 0x42: /* faddd */
2367 gen_op_load_fpr_DT0(DFPREG(rs1));
2368 gen_op_load_fpr_DT1(DFPREG(rs2));
2369 gen_clear_float_exceptions();
2370 gen_helper_faddd();
2371 gen_helper_check_ieee_exceptions();
2372 gen_op_store_DT0_fpr(DFPREG(rd));
2373 break;
2374 case 0x43: /* faddq */
2375 CHECK_FPU_FEATURE(dc, FLOAT128);
2376 gen_op_load_fpr_QT0(QFPREG(rs1));
2377 gen_op_load_fpr_QT1(QFPREG(rs2));
2378 gen_clear_float_exceptions();
2379 gen_helper_faddq();
2380 gen_helper_check_ieee_exceptions();
2381 gen_op_store_QT0_fpr(QFPREG(rd));
2382 break;
2383 case 0x45: /* fsubs */
2384 gen_clear_float_exceptions();
2385 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2386 gen_helper_check_ieee_exceptions();
2387 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2388 break;
2389 case 0x46: /* fsubd */
2390 gen_op_load_fpr_DT0(DFPREG(rs1));
2391 gen_op_load_fpr_DT1(DFPREG(rs2));
2392 gen_clear_float_exceptions();
2393 gen_helper_fsubd();
2394 gen_helper_check_ieee_exceptions();
2395 gen_op_store_DT0_fpr(DFPREG(rd));
2396 break;
2397 case 0x47: /* fsubq */
2398 CHECK_FPU_FEATURE(dc, FLOAT128);
2399 gen_op_load_fpr_QT0(QFPREG(rs1));
2400 gen_op_load_fpr_QT1(QFPREG(rs2));
2401 gen_clear_float_exceptions();
2402 gen_helper_fsubq();
2403 gen_helper_check_ieee_exceptions();
2404 gen_op_store_QT0_fpr(QFPREG(rd));
2405 break;
2406 case 0x49: /* fmuls */
2407 CHECK_FPU_FEATURE(dc, FMUL);
2408 gen_clear_float_exceptions();
2409 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2410 gen_helper_check_ieee_exceptions();
2411 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2412 break;
2413 case 0x4a: /* fmuld */
2414 CHECK_FPU_FEATURE(dc, FMUL);
2415 gen_op_load_fpr_DT0(DFPREG(rs1));
2416 gen_op_load_fpr_DT1(DFPREG(rs2));
2417 gen_clear_float_exceptions();
2418 gen_helper_fmuld();
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_DT0_fpr(DFPREG(rd));
2421 break;
2422 case 0x4b: /* fmulq */
2423 CHECK_FPU_FEATURE(dc, FLOAT128);
2424 CHECK_FPU_FEATURE(dc, FMUL);
2425 gen_op_load_fpr_QT0(QFPREG(rs1));
2426 gen_op_load_fpr_QT1(QFPREG(rs2));
2427 gen_clear_float_exceptions();
2428 gen_helper_fmulq();
2429 gen_helper_check_ieee_exceptions();
2430 gen_op_store_QT0_fpr(QFPREG(rd));
2431 break;
2432 case 0x4d: /* fdivs */
2433 gen_clear_float_exceptions();
2434 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2435 gen_helper_check_ieee_exceptions();
2436 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2437 break;
2438 case 0x4e: /* fdivd */
2439 gen_op_load_fpr_DT0(DFPREG(rs1));
2440 gen_op_load_fpr_DT1(DFPREG(rs2));
2441 gen_clear_float_exceptions();
2442 gen_helper_fdivd();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_DT0_fpr(DFPREG(rd));
2445 break;
2446 case 0x4f: /* fdivq */
2447 CHECK_FPU_FEATURE(dc, FLOAT128);
2448 gen_op_load_fpr_QT0(QFPREG(rs1));
2449 gen_op_load_fpr_QT1(QFPREG(rs2));
2450 gen_clear_float_exceptions();
2451 gen_helper_fdivq();
2452 gen_helper_check_ieee_exceptions();
2453 gen_op_store_QT0_fpr(QFPREG(rd));
2454 break;
2455 case 0x69: /* fsmuld */
2456 CHECK_FPU_FEATURE(dc, FSMULD);
2457 gen_clear_float_exceptions();
2458 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2459 gen_helper_check_ieee_exceptions();
2460 gen_op_store_DT0_fpr(DFPREG(rd));
2461 break;
2462 case 0x6e: /* fdmulq */
2463 CHECK_FPU_FEATURE(dc, FLOAT128);
2464 gen_op_load_fpr_DT0(DFPREG(rs1));
2465 gen_op_load_fpr_DT1(DFPREG(rs2));
2466 gen_clear_float_exceptions();
2467 gen_helper_fdmulq();
2468 gen_helper_check_ieee_exceptions();
2469 gen_op_store_QT0_fpr(QFPREG(rd));
2470 break;
2471 case 0xc4: /* fitos */
2472 gen_clear_float_exceptions();
2473 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2474 gen_helper_check_ieee_exceptions();
2475 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2476 break;
2477 case 0xc6: /* fdtos */
2478 gen_op_load_fpr_DT1(DFPREG(rs2));
2479 gen_clear_float_exceptions();
2480 gen_helper_fdtos(cpu_tmp32);
2481 gen_helper_check_ieee_exceptions();
2482 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2483 break;
2484 case 0xc7: /* fqtos */
2485 CHECK_FPU_FEATURE(dc, FLOAT128);
2486 gen_op_load_fpr_QT1(QFPREG(rs2));
2487 gen_clear_float_exceptions();
2488 gen_helper_fqtos(cpu_tmp32);
2489 gen_helper_check_ieee_exceptions();
2490 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2491 break;
2492 case 0xc8: /* fitod */
2493 gen_helper_fitod(cpu_fpr[rs2]);
2494 gen_op_store_DT0_fpr(DFPREG(rd));
2495 break;
2496 case 0xc9: /* fstod */
2497 gen_helper_fstod(cpu_fpr[rs2]);
2498 gen_op_store_DT0_fpr(DFPREG(rd));
2499 break;
2500 case 0xcb: /* fqtod */
2501 CHECK_FPU_FEATURE(dc, FLOAT128);
2502 gen_op_load_fpr_QT1(QFPREG(rs2));
2503 gen_clear_float_exceptions();
2504 gen_helper_fqtod();
2505 gen_helper_check_ieee_exceptions();
2506 gen_op_store_DT0_fpr(DFPREG(rd));
2507 break;
2508 case 0xcc: /* fitoq */
2509 CHECK_FPU_FEATURE(dc, FLOAT128);
2510 gen_helper_fitoq(cpu_fpr[rs2]);
2511 gen_op_store_QT0_fpr(QFPREG(rd));
2512 break;
2513 case 0xcd: /* fstoq */
2514 CHECK_FPU_FEATURE(dc, FLOAT128);
2515 gen_helper_fstoq(cpu_fpr[rs2]);
2516 gen_op_store_QT0_fpr(QFPREG(rd));
2517 break;
2518 case 0xce: /* fdtoq */
2519 CHECK_FPU_FEATURE(dc, FLOAT128);
2520 gen_op_load_fpr_DT1(DFPREG(rs2));
2521 gen_helper_fdtoq();
2522 gen_op_store_QT0_fpr(QFPREG(rd));
2523 break;
2524 case 0xd1: /* fstoi */
2525 gen_clear_float_exceptions();
2526 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2527 gen_helper_check_ieee_exceptions();
2528 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2529 break;
2530 case 0xd2: /* fdtoi */
2531 gen_op_load_fpr_DT1(DFPREG(rs2));
2532 gen_clear_float_exceptions();
2533 gen_helper_fdtoi(cpu_tmp32);
2534 gen_helper_check_ieee_exceptions();
2535 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2536 break;
2537 case 0xd3: /* fqtoi */
2538 CHECK_FPU_FEATURE(dc, FLOAT128);
2539 gen_op_load_fpr_QT1(QFPREG(rs2));
2540 gen_clear_float_exceptions();
2541 gen_helper_fqtoi(cpu_tmp32);
2542 gen_helper_check_ieee_exceptions();
2543 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2544 break;
2545 #ifdef TARGET_SPARC64
2546 case 0x2: /* V9 fmovd */
2547 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2548 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2549 cpu_fpr[DFPREG(rs2) + 1]);
2550 break;
2551 case 0x3: /* V9 fmovq */
2552 CHECK_FPU_FEATURE(dc, FLOAT128);
2553 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2554 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2555 cpu_fpr[QFPREG(rs2) + 1]);
2556 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2557 cpu_fpr[QFPREG(rs2) + 2]);
2558 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2559 cpu_fpr[QFPREG(rs2) + 3]);
2560 break;
2561 case 0x6: /* V9 fnegd */
2562 gen_op_load_fpr_DT1(DFPREG(rs2));
2563 gen_helper_fnegd();
2564 gen_op_store_DT0_fpr(DFPREG(rd));
2565 break;
2566 case 0x7: /* V9 fnegq */
2567 CHECK_FPU_FEATURE(dc, FLOAT128);
2568 gen_op_load_fpr_QT1(QFPREG(rs2));
2569 gen_helper_fnegq();
2570 gen_op_store_QT0_fpr(QFPREG(rd));
2571 break;
2572 case 0xa: /* V9 fabsd */
2573 gen_op_load_fpr_DT1(DFPREG(rs2));
2574 gen_helper_fabsd();
2575 gen_op_store_DT0_fpr(DFPREG(rd));
2576 break;
2577 case 0xb: /* V9 fabsq */
2578 CHECK_FPU_FEATURE(dc, FLOAT128);
2579 gen_op_load_fpr_QT1(QFPREG(rs2));
2580 gen_helper_fabsq();
2581 gen_op_store_QT0_fpr(QFPREG(rd));
2582 break;
2583 case 0x81: /* V9 fstox */
2584 gen_clear_float_exceptions();
2585 gen_helper_fstox(cpu_fpr[rs2]);
2586 gen_helper_check_ieee_exceptions();
2587 gen_op_store_DT0_fpr(DFPREG(rd));
2588 break;
2589 case 0x82: /* V9 fdtox */
2590 gen_op_load_fpr_DT1(DFPREG(rs2));
2591 gen_clear_float_exceptions();
2592 gen_helper_fdtox();
2593 gen_helper_check_ieee_exceptions();
2594 gen_op_store_DT0_fpr(DFPREG(rd));
2595 break;
2596 case 0x83: /* V9 fqtox */
2597 CHECK_FPU_FEATURE(dc, FLOAT128);
2598 gen_op_load_fpr_QT1(QFPREG(rs2));
2599 gen_clear_float_exceptions();
2600 gen_helper_fqtox();
2601 gen_helper_check_ieee_exceptions();
2602 gen_op_store_DT0_fpr(DFPREG(rd));
2603 break;
2604 case 0x84: /* V9 fxtos */
2605 gen_op_load_fpr_DT1(DFPREG(rs2));
2606 gen_clear_float_exceptions();
2607 gen_helper_fxtos(cpu_tmp32);
2608 gen_helper_check_ieee_exceptions();
2609 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2610 break;
2611 case 0x88: /* V9 fxtod */
2612 gen_op_load_fpr_DT1(DFPREG(rs2));
2613 gen_clear_float_exceptions();
2614 gen_helper_fxtod();
2615 gen_helper_check_ieee_exceptions();
2616 gen_op_store_DT0_fpr(DFPREG(rd));
2617 break;
2618 case 0x8c: /* V9 fxtoq */
2619 CHECK_FPU_FEATURE(dc, FLOAT128);
2620 gen_op_load_fpr_DT1(DFPREG(rs2));
2621 gen_clear_float_exceptions();
2622 gen_helper_fxtoq();
2623 gen_helper_check_ieee_exceptions();
2624 gen_op_store_QT0_fpr(QFPREG(rd));
2625 break;
2626 #endif
2627 default:
2628 goto illegal_insn;
2630 } else if (xop == 0x35) { /* FPop2: FPU conditional moves and compares */
2631 #ifdef TARGET_SPARC64
2632 int cond;
2633 #endif
2634 if (gen_trap_ifnofpu(dc, cpu_cond))
2635 goto jmp_insn;
2636 gen_op_clear_ieee_excp_and_FTT();
2637 rs1 = GET_FIELD(insn, 13, 17);
2638 rs2 = GET_FIELD(insn, 27, 31);
2639 xop = GET_FIELD(insn, 18, 26);
2640 save_state(dc, cpu_cond);
2641 #ifdef TARGET_SPARC64
2642 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2643 int l1;
2645 l1 = gen_new_label();
2646 cond = GET_FIELD_SP(insn, 14, 17);
2647 cpu_src1 = get_src1(insn, cpu_src1);
2648 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2649 0, l1);
2650 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2651 gen_set_label(l1);
2652 break;
2653 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2654 int l1;
2656 l1 = gen_new_label();
2657 cond = GET_FIELD_SP(insn, 14, 17);
2658 cpu_src1 = get_src1(insn, cpu_src1);
2659 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2660 0, l1);
2661 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2662 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2663 gen_set_label(l1);
2664 break;
2665 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2666 int l1;
2668 CHECK_FPU_FEATURE(dc, FLOAT128);
2669 l1 = gen_new_label();
2670 cond = GET_FIELD_SP(insn, 14, 17);
2671 cpu_src1 = get_src1(insn, cpu_src1);
2672 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2673 0, l1);
2674 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2675 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2676 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2677 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2678 gen_set_label(l1);
2679 break;
2681 #endif
2682 switch (xop) {
2683 #ifdef TARGET_SPARC64
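/* The FMOVcc macros below share one branch-over idiom: evaluate the
   condition into r_cond and jump past the register move when it is
   zero, so no conditional-move TCG op is needed. */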
2684 #define FMOVSCC(fcc) \
2686 TCGv r_cond; \
2687 int l1; \
2689 l1 = gen_new_label(); \
2690 r_cond = tcg_temp_new(); \
2691 cond = GET_FIELD_SP(insn, 14, 17); \
2692 gen_fcond(r_cond, fcc, cond); \
2693 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2694 0, l1); \
2695 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2696 gen_set_label(l1); \
2697 tcg_temp_free(r_cond); \
2699 #define FMOVDCC(fcc) \
2701 TCGv r_cond; \
2702 int l1; \
2704 l1 = gen_new_label(); \
2705 r_cond = tcg_temp_new(); \
2706 cond = GET_FIELD_SP(insn, 14, 17); \
2707 gen_fcond(r_cond, fcc, cond); \
2708 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2709 0, l1); \
2710 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2711 cpu_fpr[DFPREG(rs2)]); \
2712 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2713 cpu_fpr[DFPREG(rs2) + 1]); \
2714 gen_set_label(l1); \
2715 tcg_temp_free(r_cond); \
2717 #define FMOVQCC(fcc) \
2719 TCGv r_cond; \
2720 int l1; \
2722 l1 = gen_new_label(); \
2723 r_cond = tcg_temp_new(); \
2724 cond = GET_FIELD_SP(insn, 14, 17); \
2725 gen_fcond(r_cond, fcc, cond); \
2726 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2727 0, l1); \
2728 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2729 cpu_fpr[QFPREG(rs2)]); \
2730 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2731 cpu_fpr[QFPREG(rs2) + 1]); \
2732 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2733 cpu_fpr[QFPREG(rs2) + 2]); \
2734 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2735 cpu_fpr[QFPREG(rs2) + 3]); \
2736 gen_set_label(l1); \
2737 tcg_temp_free(r_cond); \
2739 case 0x001: /* V9 fmovscc %fcc0 */
2740 FMOVSCC(0);
2741 break;
2742 case 0x002: /* V9 fmovdcc %fcc0 */
2743 FMOVDCC(0);
2744 break;
2745 case 0x003: /* V9 fmovqcc %fcc0 */
2746 CHECK_FPU_FEATURE(dc, FLOAT128);
2747 FMOVQCC(0);
2748 break;
2749 case 0x041: /* V9 fmovscc %fcc1 */
2750 FMOVSCC(1);
2751 break;
2752 case 0x042: /* V9 fmovdcc %fcc1 */
2753 FMOVDCC(1);
2754 break;
2755 case 0x043: /* V9 fmovqcc %fcc1 */
2756 CHECK_FPU_FEATURE(dc, FLOAT128);
2757 FMOVQCC(1);
2758 break;
2759 case 0x081: /* V9 fmovscc %fcc2 */
2760 FMOVSCC(2);
2761 break;
2762 case 0x082: /* V9 fmovdcc %fcc2 */
2763 FMOVDCC(2);
2764 break;
2765 case 0x083: /* V9 fmovqcc %fcc2 */
2766 CHECK_FPU_FEATURE(dc, FLOAT128);
2767 FMOVQCC(2);
2768 break;
2769 case 0x0c1: /* V9 fmovscc %fcc3 */
2770 FMOVSCC(3);
2771 break;
2772 case 0x0c2: /* V9 fmovdcc %fcc3 */
2773 FMOVDCC(3);
2774 break;
2775 case 0x0c3: /* V9 fmovqcc %fcc3 */
2776 CHECK_FPU_FEATURE(dc, FLOAT128);
2777 FMOVQCC(3);
2778 break;
2779 #undef FMOVSCC
2780 #undef FMOVDCC
2781 #undef FMOVQCC
2782 #define FMOVSCC(icc) \
2784 TCGv r_cond; \
2785 int l1; \
2787 l1 = gen_new_label(); \
2788 r_cond = tcg_temp_new(); \
2789 cond = GET_FIELD_SP(insn, 14, 17); \
2790 gen_cond(r_cond, icc, cond, dc); \
2791 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2792 0, l1); \
2793 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2794 gen_set_label(l1); \
2795 tcg_temp_free(r_cond); \
2797 #define FMOVDCC(icc) \
2799 TCGv r_cond; \
2800 int l1; \
2802 l1 = gen_new_label(); \
2803 r_cond = tcg_temp_new(); \
2804 cond = GET_FIELD_SP(insn, 14, 17); \
2805 gen_cond(r_cond, icc, cond, dc); \
2806 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2807 0, l1); \
2808 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2809 cpu_fpr[DFPREG(rs2)]); \
2810 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2811 cpu_fpr[DFPREG(rs2) + 1]); \
2812 gen_set_label(l1); \
2813 tcg_temp_free(r_cond); \
2815 #define FMOVQCC(icc) \
2817 TCGv r_cond; \
2818 int l1; \
2820 l1 = gen_new_label(); \
2821 r_cond = tcg_temp_new(); \
2822 cond = GET_FIELD_SP(insn, 14, 17); \
2823 gen_cond(r_cond, icc, cond, dc); \
2824 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2825 0, l1); \
2826 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2827 cpu_fpr[QFPREG(rs2)]); \
2828 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2829 cpu_fpr[QFPREG(rs2) + 1]); \
2830 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2831 cpu_fpr[QFPREG(rs2) + 2]); \
2832 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2833 cpu_fpr[QFPREG(rs2) + 3]); \
2834 gen_set_label(l1); \
2835 tcg_temp_free(r_cond); \
2838 case 0x101: /* V9 fmovscc %icc */
2839 FMOVSCC(0);
2840 break;
2841 case 0x102: /* V9 fmovdcc %icc */
2842 FMOVDCC(0);
break;
2843 case 0x103: /* V9 fmovqcc %icc */
2844 CHECK_FPU_FEATURE(dc, FLOAT128);
2845 FMOVQCC(0);
2846 break;
2847 case 0x181: /* V9 fmovscc %xcc */
2848 FMOVSCC(1);
2849 break;
2850 case 0x182: /* V9 fmovdcc %xcc */
2851 FMOVDCC(1);
2852 break;
2853 case 0x183: /* V9 fmovqcc %xcc */
2854 CHECK_FPU_FEATURE(dc, FLOAT128);
2855 FMOVQCC(1);
2856 break;
2857 #undef FMOVSCC
2858 #undef FMOVDCC
2859 #undef FMOVQCC
2860 #endif
2861 case 0x51: /* fcmps, V9 %fcc */
2862 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2863 break;
2864 case 0x52: /* fcmpd, V9 %fcc */
2865 gen_op_load_fpr_DT0(DFPREG(rs1));
2866 gen_op_load_fpr_DT1(DFPREG(rs2));
2867 gen_op_fcmpd(rd & 3);
2868 break;
2869 case 0x53: /* fcmpq, V9 %fcc */
2870 CHECK_FPU_FEATURE(dc, FLOAT128);
2871 gen_op_load_fpr_QT0(QFPREG(rs1));
2872 gen_op_load_fpr_QT1(QFPREG(rs2));
2873 gen_op_fcmpq(rd & 3);
2874 break;
2875 case 0x55: /* fcmpes, V9 %fcc */
2876 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2877 break;
2878 case 0x56: /* fcmped, V9 %fcc */
2879 gen_op_load_fpr_DT0(DFPREG(rs1));
2880 gen_op_load_fpr_DT1(DFPREG(rs2));
2881 gen_op_fcmped(rd & 3);
2882 break;
2883 case 0x57: /* fcmpeq, V9 %fcc */
2884 CHECK_FPU_FEATURE(dc, FLOAT128);
2885 gen_op_load_fpr_QT0(QFPREG(rs1));
2886 gen_op_load_fpr_QT1(QFPREG(rs2));
2887 gen_op_fcmpeq(rd & 3);
2888 break;
2889 default:
2890 goto illegal_insn;
2892 } else if (xop == 0x2) {
2893 // clr/mov shortcut
2895 rs1 = GET_FIELD(insn, 13, 17);
2896 if (rs1 == 0) {
2897 // or %g0, x, rd: move x straight into rd (e.g. "clr %o0" is "or %g0, %g0, %o0")
2898 if (IS_IMM) { /* immediate */
2899 TCGv r_const;
2901 simm = GET_FIELDs(insn, 19, 31);
2902 r_const = tcg_const_tl(simm);
2903 gen_movl_TN_reg(rd, r_const);
2904 tcg_temp_free(r_const);
2905 } else { /* register */
2906 rs2 = GET_FIELD(insn, 27, 31);
2907 gen_movl_reg_TN(rs2, cpu_dst);
2908 gen_movl_TN_reg(rd, cpu_dst);
2910 } else {
2911 cpu_src1 = get_src1(insn, cpu_src1);
2912 if (IS_IMM) { /* immediate */
2913 simm = GET_FIELDs(insn, 19, 31);
2914 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2915 gen_movl_TN_reg(rd, cpu_dst);
2916 } else { /* register */
2917 // or x, %g0, rd: move rs1 straight into rd, skipping the or entirely
2918 rs2 = GET_FIELD(insn, 27, 31);
2919 if (rs2 != 0) {
2920 gen_movl_reg_TN(rs2, cpu_src2);
2921 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2922 gen_movl_TN_reg(rd, cpu_dst);
2923 } else
2924 gen_movl_TN_reg(rd, cpu_src1);
2927 #ifdef TARGET_SPARC64
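/* V9 shifts: bit 12 of the instruction selects the 64-bit form
   (e.g. "sllx %o1, 3, %o2"), whose count is masked to 6 bits; the
   32-bit form masks the count to 5 bits and first zero-extends (srl)
   or sign-extends (sra) the low word. */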
2928 } else if (xop == 0x25) { /* sll, V9 sllx */
2929 cpu_src1 = get_src1(insn, cpu_src1);
2930 if (IS_IMM) { /* immediate */
2931 simm = GET_FIELDs(insn, 20, 31);
2932 if (insn & (1 << 12)) {
2933 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2934 } else {
2935 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2937 } else { /* register */
2938 rs2 = GET_FIELD(insn, 27, 31);
2939 gen_movl_reg_TN(rs2, cpu_src2);
2940 if (insn & (1 << 12)) {
2941 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2942 } else {
2943 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2945 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2947 gen_movl_TN_reg(rd, cpu_dst);
2948 } else if (xop == 0x26) { /* srl, V9 srlx */
2949 cpu_src1 = get_src1(insn, cpu_src1);
2950 if (IS_IMM) { /* immediate */
2951 simm = GET_FIELDs(insn, 20, 31);
2952 if (insn & (1 << 12)) {
2953 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2954 } else {
2955 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2956 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2958 } else { /* register */
2959 rs2 = GET_FIELD(insn, 27, 31);
2960 gen_movl_reg_TN(rs2, cpu_src2);
2961 if (insn & (1 << 12)) {
2962 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2963 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2964 } else {
2965 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2966 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2967 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2970 gen_movl_TN_reg(rd, cpu_dst);
2971 } else if (xop == 0x27) { /* sra, V9 srax */
2972 cpu_src1 = get_src1(insn, cpu_src1);
2973 if (IS_IMM) { /* immediate */
2974 simm = GET_FIELDs(insn, 20, 31);
2975 if (insn & (1 << 12)) {
2976 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2977 } else {
2978 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2979 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2980 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2982 } else { /* register */
2983 rs2 = GET_FIELD(insn, 27, 31);
2984 gen_movl_reg_TN(rs2, cpu_src2);
2985 if (insn & (1 << 12)) {
2986 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2987 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2988 } else {
2989 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2990 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2991 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2992 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2995 gen_movl_TN_reg(rd, cpu_dst);
2996 #endif
2997 } else if (xop < 0x36) {
2998 if (xop < 0x20) {
2999 cpu_src1 = get_src1(insn, cpu_src1);
3000 cpu_src2 = get_src2(insn, cpu_src2);
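/* Bit 4 of xop selects the flag-setting (...cc) variant. Instead of
   computing PSR flags eagerly, the operands and a CC_OP tag are
   recorded so the flags can be materialized lazily, only when
   something actually consumes them. */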
3001 switch (xop & ~0x10) {
3002 case 0x0: /* add */
3003 if (IS_IMM) {
3004 simm = GET_FIELDs(insn, 19, 31);
3005 if (xop & 0x10) {
3006 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3007 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3008 dc->cc_op = CC_OP_ADD;
3009 } else {
3010 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3012 } else {
3013 if (xop & 0x10) {
3014 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3015 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3016 dc->cc_op = CC_OP_ADD;
3017 } else {
3018 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3021 break;
3022 case 0x1: /* and */
3023 if (IS_IMM) {
3024 simm = GET_FIELDs(insn, 19, 31);
3025 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3026 } else {
3027 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3029 if (xop & 0x10) {
3030 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3031 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3032 dc->cc_op = CC_OP_LOGIC;
3034 break;
3035 case 0x2: /* or */
3036 if (IS_IMM) {
3037 simm = GET_FIELDs(insn, 19, 31);
3038 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3039 } else {
3040 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3042 if (xop & 0x10) {
3043 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3044 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3045 dc->cc_op = CC_OP_LOGIC;
3047 break;
3048 case 0x3: /* xor */
3049 if (IS_IMM) {
3050 simm = GET_FIELDs(insn, 19, 31);
3051 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3052 } else {
3053 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3055 if (xop & 0x10) {
3056 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3057 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3058 dc->cc_op = CC_OP_LOGIC;
3060 break;
3061 case 0x4: /* sub */
3062 if (IS_IMM) {
3063 simm = GET_FIELDs(insn, 19, 31);
3064 if (xop & 0x10) {
3065 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3066 } else {
3067 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3069 } else {
3070 if (xop & 0x10) {
3071 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3072 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3073 dc->cc_op = CC_OP_SUB;
3074 } else {
3075 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3078 break;
3079 case 0x5: /* andn */
3080 if (IS_IMM) {
3081 simm = GET_FIELDs(insn, 19, 31);
3082 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3083 } else {
3084 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3086 if (xop & 0x10) {
3087 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3088 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3089 dc->cc_op = CC_OP_LOGIC;
3091 break;
3092 case 0x6: /* orn */
3093 if (IS_IMM) {
3094 simm = GET_FIELDs(insn, 19, 31);
3095 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3096 } else {
3097 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3099 if (xop & 0x10) {
3100 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3101 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3102 dc->cc_op = CC_OP_LOGIC;
3104 break;
3105 case 0x7: /* xorn */
3106 if (IS_IMM) {
3107 simm = GET_FIELDs(insn, 19, 31);
3108 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3109 } else {
3110 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3111 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3113 if (xop & 0x10) {
3114 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3115 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3116 dc->cc_op = CC_OP_LOGIC;
3118 break;
3119 case 0x8: /* addx, V9 addc */
3120 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3121 (xop & 0x10));
3122 break;
3123 #ifdef TARGET_SPARC64
3124 case 0x9: /* V9 mulx */
3125 if (IS_IMM) {
3126 simm = GET_FIELDs(insn, 19, 31);
3127 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3128 } else {
3129 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3131 break;
3132 #endif
3133 case 0xa: /* umul */
3134 CHECK_IU_FEATURE(dc, MUL);
3135 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3136 if (xop & 0x10) {
3137 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3138 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3139 dc->cc_op = CC_OP_LOGIC;
3141 break;
3142 case 0xb: /* smul */
3143 CHECK_IU_FEATURE(dc, MUL);
3144 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3145 if (xop & 0x10) {
3146 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3147 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3148 dc->cc_op = CC_OP_LOGIC;
3150 break;
3151 case 0xc: /* subx, V9 subc */
3152 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3153 (xop & 0x10));
3154 break;
3155 #ifdef TARGET_SPARC64
3156 case 0xd: /* V9 udivx */
3157 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3158 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3159 gen_trap_ifdivzero_tl(cpu_cc_src2);
3160 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3161 break;
3162 #endif
3163 case 0xe: /* udiv */
3164 CHECK_IU_FEATURE(dc, DIV);
3165 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3166 if (xop & 0x10) {
3167 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3168 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3169 dc->cc_op = CC_OP_DIV;
3171 break;
3172 case 0xf: /* sdiv */
3173 CHECK_IU_FEATURE(dc, DIV);
3174 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3175 if (xop & 0x10) {
3176 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3177 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3178 dc->cc_op = CC_OP_DIV;
3180 break;
3181 default:
3182 goto illegal_insn;
3184 gen_movl_TN_reg(rd, cpu_dst);
3185 } else {
3186 cpu_src1 = get_src1(insn, cpu_src1);
3187 cpu_src2 = get_src2(insn, cpu_src2);
3188 switch (xop) {
3189 case 0x20: /* taddcc */
3190 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3191 gen_movl_TN_reg(rd, cpu_dst);
3192 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3193 dc->cc_op = CC_OP_TADD;
3194 break;
3195 case 0x21: /* tsubcc */
3196 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3197 gen_movl_TN_reg(rd, cpu_dst);
3198 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3199 dc->cc_op = CC_OP_TSUB;
3200 break;
3201 case 0x22: /* taddcctv */
3202 save_state(dc, cpu_cond);
3203 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3204 gen_movl_TN_reg(rd, cpu_dst);
3205 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3206 dc->cc_op = CC_OP_TADDTV;
3207 break;
3208 case 0x23: /* tsubcctv */
3209 save_state(dc, cpu_cond);
3210 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3211 gen_movl_TN_reg(rd, cpu_dst);
3212 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3213 dc->cc_op = CC_OP_TSUBTV;
3214 break;
3215 case 0x24: /* mulscc */
3216 gen_helper_compute_psr();
3217 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3218 gen_movl_TN_reg(rd, cpu_dst);
3219 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3220 dc->cc_op = CC_OP_ADD;
3221 break;
3222 #ifndef TARGET_SPARC64
3223 case 0x25: /* sll */
3224 if (IS_IMM) { /* immediate */
3225 simm = GET_FIELDs(insn, 20, 31);
3226 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3227 } else { /* register */
3228 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3229 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3231 gen_movl_TN_reg(rd, cpu_dst);
3232 break;
3233 case 0x26: /* srl */
3234 if (IS_IMM) { /* immediate */
3235 simm = GET_FIELDs(insn, 20, 31);
3236 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3237 } else { /* register */
3238 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3239 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3241 gen_movl_TN_reg(rd, cpu_dst);
3242 break;
3243 case 0x27: /* sra */
3244 if (IS_IMM) { /* immediate */
3245 simm = GET_FIELDs(insn, 20, 31);
3246 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3247 } else { /* register */
3248 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3249 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3251 gen_movl_TN_reg(rd, cpu_dst);
3252 break;
3253 #endif
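/* wr/wrasr: the architected source value is rs1 XOR reg_or_imm,
   which is why every writable case below starts by xoring cpu_src1
   with cpu_src2. */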
3254 case 0x30:
3256 switch(rd) {
3257 case 0: /* wry */
3258 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3259 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3260 break;
3261 #ifndef TARGET_SPARC64
3262 case 0x01 ... 0x0f: /* undefined in the SPARCv8 manual, nop on the microSPARC II */
3266 case 0x10 ... 0x1f: /* implementation-dependent in the SPARCv8 manual, nop on the microSPARC II */
3270 break;
3271 #else
3272 case 0x2: /* V9 wrccr */
3273 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3274 gen_helper_wrccr(cpu_dst);
3275 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3276 dc->cc_op = CC_OP_FLAGS;
3277 break;
3278 case 0x3: /* V9 wrasi */
3279 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3280 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3281 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3282 break;
3283 case 0x6: /* V9 wrfprs */
3284 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3285 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3286 save_state(dc, cpu_cond);
3287 gen_op_next_insn();
3288 tcg_gen_exit_tb(0);
3289 dc->is_br = 1;
3290 break;
3291 case 0xf: /* V9 sir, nop if user */
3292 #if !defined(CONFIG_USER_ONLY)
3293 if (supervisor(dc)) {
3294 ; // XXX
3296 #endif
3297 break;
3298 case 0x13: /* Graphics Status */
3299 if (gen_trap_ifnofpu(dc, cpu_cond))
3300 goto jmp_insn;
3301 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3302 break;
3303 case 0x14: /* Softint set */
3304 if (!supervisor(dc))
3305 goto illegal_insn;
3306 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3307 gen_helper_set_softint(cpu_tmp64);
3308 break;
3309 case 0x15: /* Softint clear */
3310 if (!supervisor(dc))
3311 goto illegal_insn;
3312 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3313 gen_helper_clear_softint(cpu_tmp64);
3314 break;
3315 case 0x16: /* Softint write */
3316 if (!supervisor(dc))
3317 goto illegal_insn;
3318 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3319 gen_helper_write_softint(cpu_tmp64);
3320 break;
3321 case 0x17: /* Tick compare */
3322 #if !defined(CONFIG_USER_ONLY)
3323 if (!supervisor(dc))
3324 goto illegal_insn;
3325 #endif
3327 TCGv_ptr r_tickptr;
3329 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3330 cpu_src2);
3331 r_tickptr = tcg_temp_new_ptr();
3332 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3333 offsetof(CPUState, tick));
3334 gen_helper_tick_set_limit(r_tickptr,
3335 cpu_tick_cmpr);
3336 tcg_temp_free_ptr(r_tickptr);
3338 break;
3339 case 0x18: /* System tick */
3340 #if !defined(CONFIG_USER_ONLY)
3341 if (!supervisor(dc))
3342 goto illegal_insn;
3343 #endif
3345 TCGv_ptr r_tickptr;
3347 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3348 cpu_src2);
3349 r_tickptr = tcg_temp_new_ptr();
3350 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3351 offsetof(CPUState, stick));
3352 gen_helper_tick_set_count(r_tickptr,
3353 cpu_dst);
3354 tcg_temp_free_ptr(r_tickptr);
3356 break;
3357 case 0x19: /* System tick compare */
3358 #if !defined(CONFIG_USER_ONLY)
3359 if (!supervisor(dc))
3360 goto illegal_insn;
3361 #endif
3363 TCGv_ptr r_tickptr;
3365 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3366 cpu_src2);
3367 r_tickptr = tcg_temp_new_ptr();
3368 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3369 offsetof(CPUState, stick));
3370 gen_helper_tick_set_limit(r_tickptr,
3371 cpu_stick_cmpr);
3372 tcg_temp_free_ptr(r_tickptr);
3374 break;
3376 case 0x10: /* Performance Control */
3377 case 0x11: /* Performance Instrumentation Counter */
3379 case 0x12: /* Dispatch Control */
3380 #endif
3381 default:
3382 goto illegal_insn;
3385 break;
3386 #if !defined(CONFIG_USER_ONLY)
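/* On 32-bit targets, writing the PSR can change CWP and interrupt
   state behind the translator's back, so the helper is followed by
   save_state/exit_tb to end the TB and return to the main loop. */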
3387 case 0x31: /* wrpsr, V9 saved, restored */
3389 if (!supervisor(dc))
3390 goto priv_insn;
3391 #ifdef TARGET_SPARC64
3392 switch (rd) {
3393 case 0:
3394 gen_helper_saved();
3395 break;
3396 case 1:
3397 gen_helper_restored();
3398 break;
3399 case 2: /* UA2005 allclean */
3400 case 3: /* UA2005 otherw */
3401 case 4: /* UA2005 normalw */
3402 case 5: /* UA2005 invalw */
3403 // XXX
3404 default:
3405 goto illegal_insn;
3407 #else
3408 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3409 gen_helper_wrpsr(cpu_dst);
3410 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3411 dc->cc_op = CC_OP_FLAGS;
3412 save_state(dc, cpu_cond);
3413 gen_op_next_insn();
3414 tcg_gen_exit_tb(0);
3415 dc->is_br = 1;
3416 #endif
3418 break;
3419 case 0x32: /* wrwim, V9 wrpr */
3421 if (!supervisor(dc))
3422 goto priv_insn;
3423 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3424 #ifdef TARGET_SPARC64
3425 switch (rd) {
3426 case 0: // tpc
3428 TCGv_ptr r_tsptr;
3430 r_tsptr = tcg_temp_new_ptr();
3431 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3432 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3433 offsetof(trap_state, tpc));
3434 tcg_temp_free_ptr(r_tsptr);
3436 break;
3437 case 1: // tnpc
3439 TCGv_ptr r_tsptr;
3441 r_tsptr = tcg_temp_new_ptr();
3442 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3443 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3444 offsetof(trap_state, tnpc));
3445 tcg_temp_free_ptr(r_tsptr);
3447 break;
3448 case 2: // tstate
3450 TCGv_ptr r_tsptr;
3452 r_tsptr = tcg_temp_new_ptr();
3453 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3454 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3455 offsetof(trap_state,
3456 tstate));
3457 tcg_temp_free_ptr(r_tsptr);
3459 break;
3460 case 3: // tt
3462 TCGv_ptr r_tsptr;
3464 r_tsptr = tcg_temp_new_ptr();
3465 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3466 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3467 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3468 offsetof(trap_state, tt));
3469 tcg_temp_free_ptr(r_tsptr);
3471 break;
3472 case 4: // tick
3474 TCGv_ptr r_tickptr;
3476 r_tickptr = tcg_temp_new_ptr();
3477 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3478 offsetof(CPUState, tick));
3479 gen_helper_tick_set_count(r_tickptr,
3480 cpu_tmp0);
3481 tcg_temp_free_ptr(r_tickptr);
3483 break;
3484 case 5: // tba
3485 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3486 break;
3487 case 6: // pstate
3488 save_state(dc, cpu_cond);
3489 gen_helper_wrpstate(cpu_tmp0);
3490 dc->npc = DYNAMIC_PC;
3491 break;
3492 case 7: // tl
3493 save_state(dc, cpu_cond);
3494 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3495 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3496 offsetof(CPUSPARCState, tl));
3497 dc->npc = DYNAMIC_PC;
3498 break;
3499 case 8: // pil
3500 gen_helper_wrpil(cpu_tmp0);
3501 break;
3502 case 9: // cwp
3503 gen_helper_wrcwp(cpu_tmp0);
3504 break;
3505 case 10: // cansave
3506 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3507 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3508 offsetof(CPUSPARCState,
3509 cansave));
3510 break;
3511 case 11: // canrestore
3512 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3513 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3514 offsetof(CPUSPARCState,
3515 canrestore));
3516 break;
3517 case 12: // cleanwin
3518 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3519 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3520 offsetof(CPUSPARCState,
3521 cleanwin));
3522 break;
3523 case 13: // otherwin
3524 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3525 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3526 offsetof(CPUSPARCState,
3527 otherwin));
3528 break;
3529 case 14: // wstate
3530 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3531 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3532 offsetof(CPUSPARCState,
3533 wstate));
3534 break;
3535 case 16: // UA2005 gl
3536 CHECK_IU_FEATURE(dc, GL);
3537 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3538 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3539 offsetof(CPUSPARCState, gl));
3540 break;
3541 case 26: // UA2005 strand status
3542 CHECK_IU_FEATURE(dc, HYPV);
3543 if (!hypervisor(dc))
3544 goto priv_insn;
3545 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3546 break;
3547 default:
3548 goto illegal_insn;
3550 #else
3551 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3552 if (dc->def->nwindows != 32)
3553 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3554 (1 << dc->def->nwindows) - 1);
3555 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3556 #endif
3558 break;
3559 case 0x33: /* wrtbr, UA2005 wrhpr */
3561 #ifndef TARGET_SPARC64
3562 if (!supervisor(dc))
3563 goto priv_insn;
3564 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3565 #else
3566 CHECK_IU_FEATURE(dc, HYPV);
3567 if (!hypervisor(dc))
3568 goto priv_insn;
3569 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3570 switch (rd) {
3571 case 0: // hpstate
3572 // XXX gen_op_wrhpstate();
3573 save_state(dc, cpu_cond);
3574 gen_op_next_insn();
3575 tcg_gen_exit_tb(0);
3576 dc->is_br = 1;
3577 break;
3578 case 1: // htstate
3579 // XXX gen_op_wrhtstate();
3580 break;
3581 case 3: // hintp
3582 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3583 break;
3584 case 5: // htba
3585 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3586 break;
3587 case 31: // hstick_cmpr
3589 TCGv_ptr r_tickptr;
3591 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3592 r_tickptr = tcg_temp_new_ptr();
3593 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3594 offsetof(CPUState, hstick));
3595 gen_helper_tick_set_limit(r_tickptr,
3596 cpu_hstick_cmpr);
3597 tcg_temp_free_ptr(r_tickptr);
3599 break;
3600 case 6: // hver readonly
3601 default:
3602 goto illegal_insn;
3604 #endif
3606 break;
3607 #endif
3608 #ifdef TARGET_SPARC64
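/* movcc/movr reuse the branch-over idiom of the FMOVcc macros:
   branch past the destination write when the condition (or the
   register test) fails. */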
3609 case 0x2c: /* V9 movcc */
3611 int cc = GET_FIELD_SP(insn, 11, 12);
3612 int cond = GET_FIELD_SP(insn, 14, 17);
3613 TCGv r_cond;
3614 int l1;
3616 r_cond = tcg_temp_new();
3617 if (insn & (1 << 18)) {
3618 if (cc == 0)
3619 gen_cond(r_cond, 0, cond, dc);
3620 else if (cc == 2)
3621 gen_cond(r_cond, 1, cond, dc);
3622 else
3623 goto illegal_insn;
3624 } else {
3625 gen_fcond(r_cond, cc, cond);
3628 l1 = gen_new_label();
3630 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3631 if (IS_IMM) { /* immediate */
3632 TCGv r_const;
3634 simm = GET_FIELD_SPs(insn, 0, 10);
3635 r_const = tcg_const_tl(simm);
3636 gen_movl_TN_reg(rd, r_const);
3637 tcg_temp_free(r_const);
3638 } else {
3639 rs2 = GET_FIELD_SP(insn, 0, 4);
3640 gen_movl_reg_TN(rs2, cpu_tmp0);
3641 gen_movl_TN_reg(rd, cpu_tmp0);
3643 gen_set_label(l1);
3644 tcg_temp_free(r_cond);
3645 break;
3647 case 0x2d: /* V9 sdivx */
3648 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3649 gen_movl_TN_reg(rd, cpu_dst);
3650 break;
3651 case 0x2e: /* V9 popc */
3653 cpu_src2 = get_src2(insn, cpu_src2);
3654 gen_helper_popc(cpu_dst, cpu_src2);
3655 gen_movl_TN_reg(rd, cpu_dst);
break;
3657 case 0x2f: /* V9 movr */
3659 int cond = GET_FIELD_SP(insn, 10, 12);
3660 int l1;
3662 cpu_src1 = get_src1(insn, cpu_src1);
3664 l1 = gen_new_label();
3666 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3667 cpu_src1, 0, l1);
3668 if (IS_IMM) { /* immediate */
3669 TCGv r_const;
3671 simm = GET_FIELD_SPs(insn, 0, 9);
3672 r_const = tcg_const_tl(simm);
3673 gen_movl_TN_reg(rd, r_const);
3674 tcg_temp_free(r_const);
3675 } else {
3676 rs2 = GET_FIELD_SP(insn, 0, 4);
3677 gen_movl_reg_TN(rs2, cpu_tmp0);
3678 gen_movl_TN_reg(rd, cpu_tmp0);
3680 gen_set_label(l1);
3681 break;
3683 #endif
3684 default:
3685 goto illegal_insn;
3688 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3689 #ifdef TARGET_SPARC64
3690 int opf = GET_FIELD_SP(insn, 5, 13);
3691 rs1 = GET_FIELD(insn, 13, 17);
3692 rs2 = GET_FIELD(insn, 27, 31);
3693 if (gen_trap_ifnofpu(dc, cpu_cond))
3694 goto jmp_insn;
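/* VIS (impdep1): partitioned 8/16/32-bit SIMD ops on the FP
   registers. Double-width operands go through the DT0/DT1 staging
   registers; opcodes not implemented yet fall through to
   illegal_insn. */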
3696 switch (opf) {
3697 case 0x000: /* VIS I edge8cc */
3698 case 0x001: /* VIS II edge8n */
3699 case 0x002: /* VIS I edge8lcc */
3700 case 0x003: /* VIS II edge8ln */
3701 case 0x004: /* VIS I edge16cc */
3702 case 0x005: /* VIS II edge16n */
3703 case 0x006: /* VIS I edge16lcc */
3704 case 0x007: /* VIS II edge16ln */
3705 case 0x008: /* VIS I edge32cc */
3706 case 0x009: /* VIS II edge32n */
3707 case 0x00a: /* VIS I edge32lcc */
3708 case 0x00b: /* VIS II edge32ln */
3709 // XXX
3710 goto illegal_insn;
3711 case 0x010: /* VIS I array8 */
3712 CHECK_FPU_FEATURE(dc, VIS1);
3713 cpu_src1 = get_src1(insn, cpu_src1);
3714 gen_movl_reg_TN(rs2, cpu_src2);
3715 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3716 gen_movl_TN_reg(rd, cpu_dst);
3717 break;
3718 case 0x012: /* VIS I array16 */
3719 CHECK_FPU_FEATURE(dc, VIS1);
3720 cpu_src1 = get_src1(insn, cpu_src1);
3721 gen_movl_reg_TN(rs2, cpu_src2);
3722 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3723 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3724 gen_movl_TN_reg(rd, cpu_dst);
3725 break;
3726 case 0x014: /* VIS I array32 */
3727 CHECK_FPU_FEATURE(dc, VIS1);
3728 cpu_src1 = get_src1(insn, cpu_src1);
3729 gen_movl_reg_TN(rs2, cpu_src2);
3730 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3731 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3732 gen_movl_TN_reg(rd, cpu_dst);
3733 break;
3734 case 0x018: /* VIS I alignaddr */
3735 CHECK_FPU_FEATURE(dc, VIS1);
3736 cpu_src1 = get_src1(insn, cpu_src1);
3737 gen_movl_reg_TN(rs2, cpu_src2);
3738 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3739 gen_movl_TN_reg(rd, cpu_dst);
3740 break;
3741 case 0x019: /* VIS II bmask */
3742 case 0x01a: /* VIS I alignaddrl */
3743 // XXX
3744 goto illegal_insn;
3745 case 0x020: /* VIS I fcmple16 */
3746 CHECK_FPU_FEATURE(dc, VIS1);
3747 gen_op_load_fpr_DT0(DFPREG(rs1));
3748 gen_op_load_fpr_DT1(DFPREG(rs2));
3749 gen_helper_fcmple16();
3750 gen_op_store_DT0_fpr(DFPREG(rd));
3751 break;
3752 case 0x022: /* VIS I fcmpne16 */
3753 CHECK_FPU_FEATURE(dc, VIS1);
3754 gen_op_load_fpr_DT0(DFPREG(rs1));
3755 gen_op_load_fpr_DT1(DFPREG(rs2));
3756 gen_helper_fcmpne16();
3757 gen_op_store_DT0_fpr(DFPREG(rd));
3758 break;
3759 case 0x024: /* VIS I fcmple32 */
3760 CHECK_FPU_FEATURE(dc, VIS1);
3761 gen_op_load_fpr_DT0(DFPREG(rs1));
3762 gen_op_load_fpr_DT1(DFPREG(rs2));
3763 gen_helper_fcmple32();
3764 gen_op_store_DT0_fpr(DFPREG(rd));
3765 break;
3766 case 0x026: /* VIS I fcmpne32 */
3767 CHECK_FPU_FEATURE(dc, VIS1);
3768 gen_op_load_fpr_DT0(DFPREG(rs1));
3769 gen_op_load_fpr_DT1(DFPREG(rs2));
3770 gen_helper_fcmpne32();
3771 gen_op_store_DT0_fpr(DFPREG(rd));
3772 break;
3773 case 0x028: /* VIS I fcmpgt16 */
3774 CHECK_FPU_FEATURE(dc, VIS1);
3775 gen_op_load_fpr_DT0(DFPREG(rs1));
3776 gen_op_load_fpr_DT1(DFPREG(rs2));
3777 gen_helper_fcmpgt16();
3778 gen_op_store_DT0_fpr(DFPREG(rd));
3779 break;
3780 case 0x02a: /* VIS I fcmpeq16 */
3781 CHECK_FPU_FEATURE(dc, VIS1);
3782 gen_op_load_fpr_DT0(DFPREG(rs1));
3783 gen_op_load_fpr_DT1(DFPREG(rs2));
3784 gen_helper_fcmpeq16();
3785 gen_op_store_DT0_fpr(DFPREG(rd));
3786 break;
3787 case 0x02c: /* VIS I fcmpgt32 */
3788 CHECK_FPU_FEATURE(dc, VIS1);
3789 gen_op_load_fpr_DT0(DFPREG(rs1));
3790 gen_op_load_fpr_DT1(DFPREG(rs2));
3791 gen_helper_fcmpgt32();
3792 gen_op_store_DT0_fpr(DFPREG(rd));
3793 break;
3794 case 0x02e: /* VIS I fcmpeq32 */
3795 CHECK_FPU_FEATURE(dc, VIS1);
3796 gen_op_load_fpr_DT0(DFPREG(rs1));
3797 gen_op_load_fpr_DT1(DFPREG(rs2));
3798 gen_helper_fcmpeq32();
3799 gen_op_store_DT0_fpr(DFPREG(rd));
3800 break;
3801 case 0x031: /* VIS I fmul8x16 */
3802 CHECK_FPU_FEATURE(dc, VIS1);
3803 gen_op_load_fpr_DT0(DFPREG(rs1));
3804 gen_op_load_fpr_DT1(DFPREG(rs2));
3805 gen_helper_fmul8x16();
3806 gen_op_store_DT0_fpr(DFPREG(rd));
3807 break;
3808 case 0x033: /* VIS I fmul8x16au */
3809 CHECK_FPU_FEATURE(dc, VIS1);
3810 gen_op_load_fpr_DT0(DFPREG(rs1));
3811 gen_op_load_fpr_DT1(DFPREG(rs2));
3812 gen_helper_fmul8x16au();
3813 gen_op_store_DT0_fpr(DFPREG(rd));
3814 break;
3815 case 0x035: /* VIS I fmul8x16al */
3816 CHECK_FPU_FEATURE(dc, VIS1);
3817 gen_op_load_fpr_DT0(DFPREG(rs1));
3818 gen_op_load_fpr_DT1(DFPREG(rs2));
3819 gen_helper_fmul8x16al();
3820 gen_op_store_DT0_fpr(DFPREG(rd));
3821 break;
3822 case 0x036: /* VIS I fmul8sux16 */
3823 CHECK_FPU_FEATURE(dc, VIS1);
3824 gen_op_load_fpr_DT0(DFPREG(rs1));
3825 gen_op_load_fpr_DT1(DFPREG(rs2));
3826 gen_helper_fmul8sux16();
3827 gen_op_store_DT0_fpr(DFPREG(rd));
3828 break;
3829 case 0x037: /* VIS I fmul8ulx16 */
3830 CHECK_FPU_FEATURE(dc, VIS1);
3831 gen_op_load_fpr_DT0(DFPREG(rs1));
3832 gen_op_load_fpr_DT1(DFPREG(rs2));
3833 gen_helper_fmul8ulx16();
3834 gen_op_store_DT0_fpr(DFPREG(rd));
3835 break;
3836 case 0x038: /* VIS I fmuld8sux16 */
3837 CHECK_FPU_FEATURE(dc, VIS1);
3838 gen_op_load_fpr_DT0(DFPREG(rs1));
3839 gen_op_load_fpr_DT1(DFPREG(rs2));
3840 gen_helper_fmuld8sux16();
3841 gen_op_store_DT0_fpr(DFPREG(rd));
3842 break;
3843 case 0x039: /* VIS I fmuld8ulx16 */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 gen_op_load_fpr_DT0(DFPREG(rs1));
3846 gen_op_load_fpr_DT1(DFPREG(rs2));
3847 gen_helper_fmuld8ulx16();
3848 gen_op_store_DT0_fpr(DFPREG(rd));
3849 break;
3850 case 0x03a: /* VIS I fpack32 */
3851 case 0x03b: /* VIS I fpack16 */
3852 case 0x03d: /* VIS I fpackfix */
3853 case 0x03e: /* VIS I pdist */
3854 // XXX
3855 goto illegal_insn;
3856 case 0x048: /* VIS I faligndata */
3857 CHECK_FPU_FEATURE(dc, VIS1);
3858 gen_op_load_fpr_DT0(DFPREG(rs1));
3859 gen_op_load_fpr_DT1(DFPREG(rs2));
3860 gen_helper_faligndata();
3861 gen_op_store_DT0_fpr(DFPREG(rd));
3862 break;
3863 case 0x04b: /* VIS I fpmerge */
3864 CHECK_FPU_FEATURE(dc, VIS1);
3865 gen_op_load_fpr_DT0(DFPREG(rs1));
3866 gen_op_load_fpr_DT1(DFPREG(rs2));
3867 gen_helper_fpmerge();
3868 gen_op_store_DT0_fpr(DFPREG(rd));
3869 break;
3870 case 0x04c: /* VIS II bshuffle */
3871 // XXX
3872 goto illegal_insn;
3873 case 0x04d: /* VIS I fexpand */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 gen_op_load_fpr_DT0(DFPREG(rs1));
3876 gen_op_load_fpr_DT1(DFPREG(rs2));
3877 gen_helper_fexpand();
3878 gen_op_store_DT0_fpr(DFPREG(rd));
3879 break;
3880 case 0x050: /* VIS I fpadd16 */
3881 CHECK_FPU_FEATURE(dc, VIS1);
3882 gen_op_load_fpr_DT0(DFPREG(rs1));
3883 gen_op_load_fpr_DT1(DFPREG(rs2));
3884 gen_helper_fpadd16();
3885 gen_op_store_DT0_fpr(DFPREG(rd));
3886 break;
3887 case 0x051: /* VIS I fpadd16s */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 gen_helper_fpadd16s(cpu_fpr[rd],
3890 cpu_fpr[rs1], cpu_fpr[rs2]);
3891 break;
3892 case 0x052: /* VIS I fpadd32 */
3893 CHECK_FPU_FEATURE(dc, VIS1);
3894 gen_op_load_fpr_DT0(DFPREG(rs1));
3895 gen_op_load_fpr_DT1(DFPREG(rs2));
3896 gen_helper_fpadd32();
3897 gen_op_store_DT0_fpr(DFPREG(rd));
3898 break;
3899 case 0x053: /* VIS I fpadd32s */
3900 CHECK_FPU_FEATURE(dc, VIS1);
3901 gen_helper_fpadd32s(cpu_fpr[rd],
3902 cpu_fpr[rs1], cpu_fpr[rs2]);
3903 break;
3904 case 0x054: /* VIS I fpsub16 */
3905 CHECK_FPU_FEATURE(dc, VIS1);
3906 gen_op_load_fpr_DT0(DFPREG(rs1));
3907 gen_op_load_fpr_DT1(DFPREG(rs2));
3908 gen_helper_fpsub16();
3909 gen_op_store_DT0_fpr(DFPREG(rd));
3910 break;
3911 case 0x055: /* VIS I fpsub16s */
3912 CHECK_FPU_FEATURE(dc, VIS1);
3913 gen_helper_fpsub16s(cpu_fpr[rd],
3914 cpu_fpr[rs1], cpu_fpr[rs2]);
3915 break;
3916 case 0x056: /* VIS I fpsub32 */
3917 CHECK_FPU_FEATURE(dc, VIS1);
3918 gen_op_load_fpr_DT0(DFPREG(rs1));
3919 gen_op_load_fpr_DT1(DFPREG(rs2));
3920 gen_helper_fpsub32();
3921 gen_op_store_DT0_fpr(DFPREG(rd));
3922 break;
3923 case 0x057: /* VIS I fpsub32s */
3924 CHECK_FPU_FEATURE(dc, VIS1);
3925 gen_helper_fpsub32s(cpu_fpr[rd],
3926 cpu_fpr[rs1], cpu_fpr[rs2]);
3927 break;
3928 case 0x060: /* VIS I fzero */
3929 CHECK_FPU_FEATURE(dc, VIS1);
3930 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3931 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3932 break;
3933 case 0x061: /* VIS I fzeros */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3936 break;
3937 case 0x062: /* VIS I fnor */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3940 cpu_fpr[DFPREG(rs2)]);
3941 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
3942 cpu_fpr[DFPREG(rs2) + 1]);
3943 break;
3944 case 0x063: /* VIS I fnors */
3945 CHECK_FPU_FEATURE(dc, VIS1);
3946 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3947 break;
3948 case 0x064: /* VIS I fandnot2 */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3951 cpu_fpr[DFPREG(rs2)]);
3952 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3953 cpu_fpr[DFPREG(rs1) + 1],
3954 cpu_fpr[DFPREG(rs2) + 1]);
3955 break;
3956 case 0x065: /* VIS I fandnot2s */
3957 CHECK_FPU_FEATURE(dc, VIS1);
3958 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3959 break;
3960 case 0x066: /* VIS I fnot2 */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3963 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3964 cpu_fpr[DFPREG(rs2) + 1]);
3965 break;
3966 case 0x067: /* VIS I fnot2s */
3967 CHECK_FPU_FEATURE(dc, VIS1);
3968 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3969 break;
3970 case 0x068: /* VIS I fandnot1 */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3973 cpu_fpr[DFPREG(rs1)]);
3974 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3975 cpu_fpr[DFPREG(rs2) + 1],
3976 cpu_fpr[DFPREG(rs1) + 1]);
3977 break;
3978 case 0x069: /* VIS I fandnot1s */
3979 CHECK_FPU_FEATURE(dc, VIS1);
3980 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3981 break;
3982 case 0x06a: /* VIS I fnot1 */
3983 CHECK_FPU_FEATURE(dc, VIS1);
3984 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3985 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3986 cpu_fpr[DFPREG(rs1) + 1]);
3987 break;
3988 case 0x06b: /* VIS I fnot1s */
3989 CHECK_FPU_FEATURE(dc, VIS1);
3990 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3991 break;
3992 case 0x06c: /* VIS I fxor */
3993 CHECK_FPU_FEATURE(dc, VIS1);
3994 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3995 cpu_fpr[DFPREG(rs2)]);
3996 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3997 cpu_fpr[DFPREG(rs1) + 1],
3998 cpu_fpr[DFPREG(rs2) + 1]);
3999 break;
4000 case 0x06d: /* VIS I fxors */
4001 CHECK_FPU_FEATURE(dc, VIS1);
4002 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4003 break;
4004 case 0x06e: /* VIS I fnand */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4007 cpu_fpr[DFPREG(rs2)]);
4008 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4009 cpu_fpr[DFPREG(rs2) + 1]);
4010 break;
4011 case 0x06f: /* VIS I fnands */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4014 break;
4015 case 0x070: /* VIS I fand */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4018 cpu_fpr[DFPREG(rs2)]);
4019 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4020 cpu_fpr[DFPREG(rs1) + 1],
4021 cpu_fpr[DFPREG(rs2) + 1]);
4022 break;
4023 case 0x071: /* VIS I fands */
4024 CHECK_FPU_FEATURE(dc, VIS1);
4025 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4026 break;
4027 case 0x072: /* VIS I fxnor */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4030 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4031 cpu_fpr[DFPREG(rs1)]);
4032 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4033 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4034 cpu_fpr[DFPREG(rs1) + 1]);
4035 break;
4036 case 0x073: /* VIS I fxnors */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4039 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4040 break;
4041 case 0x074: /* VIS I fsrc1 */
4042 CHECK_FPU_FEATURE(dc, VIS1);
4043 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4044 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4045 cpu_fpr[DFPREG(rs1) + 1]);
4046 break;
4047 case 0x075: /* VIS I fsrc1s */
4048 CHECK_FPU_FEATURE(dc, VIS1);
4049 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4050 break;
4051 case 0x076: /* VIS I fornot2 */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4054 cpu_fpr[DFPREG(rs2)]);
4055 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4056 cpu_fpr[DFPREG(rs1) + 1],
4057 cpu_fpr[DFPREG(rs2) + 1]);
4058 break;
4059 case 0x077: /* VIS I fornot2s */
4060 CHECK_FPU_FEATURE(dc, VIS1);
4061 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4062 break;
4063 case 0x078: /* VIS I fsrc2 */
4064 CHECK_FPU_FEATURE(dc, VIS1);
4065 gen_op_load_fpr_DT0(DFPREG(rs2));
4066 gen_op_store_DT0_fpr(DFPREG(rd));
4067 break;
4068 case 0x079: /* VIS I fsrc2s */
4069 CHECK_FPU_FEATURE(dc, VIS1);
4070 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4071 break;
4072 case 0x07a: /* VIS I fornot1 */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4075 cpu_fpr[DFPREG(rs1)]);
4076 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4077 cpu_fpr[DFPREG(rs2) + 1],
4078 cpu_fpr[DFPREG(rs1) + 1]);
4079 break;
4080 case 0x07b: /* VIS I fornot1s */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4083 break;
4084 case 0x07c: /* VIS I for */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4087 cpu_fpr[DFPREG(rs2)]);
4088 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4089 cpu_fpr[DFPREG(rs1) + 1],
4090 cpu_fpr[DFPREG(rs2) + 1]);
4091 break;
4092 case 0x07d: /* VIS I fors */
4093 CHECK_FPU_FEATURE(dc, VIS1);
4094 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4095 break;
4096 case 0x07e: /* VIS I fone */
4097 CHECK_FPU_FEATURE(dc, VIS1);
4098 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4099 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4100 break;
4101 case 0x07f: /* VIS I fones */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4104 break;
4105 case 0x080: /* VIS I shutdown */
4106 case 0x081: /* VIS II siam */
4107 // XXX
4108 goto illegal_insn;
4109 default:
4110 goto illegal_insn;
4112 #else
4113 goto ncp_insn;
4114 #endif
4115 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4116 #ifdef TARGET_SPARC64
4117 goto illegal_insn;
4118 #else
4119 goto ncp_insn;
4120 #endif
4121 #ifdef TARGET_SPARC64
4122 } else if (xop == 0x39) { /* V9 return */
4123 TCGv_i32 r_const;
4125 save_state(dc, cpu_cond);
4126 cpu_src1 = get_src1(insn, cpu_src1);
4127 if (IS_IMM) { /* immediate */
4128 simm = GET_FIELDs(insn, 19, 31);
4129 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4130 } else { /* register */
4131 rs2 = GET_FIELD(insn, 27, 31);
4132 if (rs2) {
4133 gen_movl_reg_TN(rs2, cpu_src2);
4134 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4135 } else
4136 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4138 gen_helper_restore();
4139 gen_mov_pc_npc(dc, cpu_cond);
4140 r_const = tcg_const_i32(3);
4141 gen_helper_check_align(cpu_dst, r_const);
4142 tcg_temp_free_i32(r_const);
4143 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4144 dc->npc = DYNAMIC_PC;
4145 goto jmp_insn;
4146 #endif
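/* The remaining xop values (jmpl, rett, flush, save, restore) share
   one operand computation: cpu_dst = rs1 + (simm13 or rs2). */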
4147 } else {
4148 cpu_src1 = get_src1(insn, cpu_src1);
4149 if (IS_IMM) { /* immediate */
4150 simm = GET_FIELDs(insn, 19, 31);
4151 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4152 } else { /* register */
4153 rs2 = GET_FIELD(insn, 27, 31);
4154 if (rs2) {
4155 gen_movl_reg_TN(rs2, cpu_src2);
4156 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4157 } else
4158 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4159 }
4160 switch (xop) {
4161 case 0x38: /* jmpl */
4162 {
4163 TCGv r_pc;
4164 TCGv_i32 r_const;
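/* jmpl: rd receives the PC of this instruction as the link value,
   then control transfers to the computed address, which must be
   4-byte aligned. */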
4166 r_pc = tcg_const_tl(dc->pc);
4167 gen_movl_TN_reg(rd, r_pc);
4168 tcg_temp_free(r_pc);
4169 gen_mov_pc_npc(dc, cpu_cond);
4170 r_const = tcg_const_i32(3);
4171 gen_helper_check_align(cpu_dst, r_const);
4172 tcg_temp_free_i32(r_const);
4173 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4174 dc->npc = DYNAMIC_PC;
4175 }
4176 goto jmp_insn;
4177 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4178 case 0x39: /* rett, V9 return */
4179 {
4180 TCGv_i32 r_const;
4182 if (!supervisor(dc))
4183 goto priv_insn;
4184 gen_mov_pc_npc(dc, cpu_cond);
4185 r_const = tcg_const_i32(3);
4186 gen_helper_check_align(cpu_dst, r_const);
4187 tcg_temp_free_i32(r_const);
4188 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4189 dc->npc = DYNAMIC_PC;
4190 gen_helper_rett();
4191 }
4192 goto jmp_insn;
4193 #endif
4194 case 0x3b: /* flush */
4195 if (!(dc->def->features & CPU_FEATURE_FLUSH))
4196 goto unimp_flush;
4197 gen_helper_flush(cpu_dst);
4198 break;
4199 case 0x3c: /* save */
4200 save_state(dc, cpu_cond);
4201 gen_helper_save();
4202 gen_movl_TN_reg(rd, cpu_dst);
4203 break;
4204 case 0x3d: /* restore */
4205 save_state(dc, cpu_cond);
4206 gen_helper_restore();
4207 gen_movl_TN_reg(rd, cpu_dst);
4208 break;
4209 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4210 case 0x3e: /* V9 done/retry */
4211 {
4212 switch (rd) {
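/* rd == 0 is done, rd == 1 is retry; both helpers reload pc/npc
   from the trap state, so the translator marks both as dynamic. */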
4213 case 0:
4214 if (!supervisor(dc))
4215 goto priv_insn;
4216 dc->npc = DYNAMIC_PC;
4217 dc->pc = DYNAMIC_PC;
4218 gen_helper_done();
4219 goto jmp_insn;
4220 case 1:
4221 if (!supervisor(dc))
4222 goto priv_insn;
4223 dc->npc = DYNAMIC_PC;
4224 dc->pc = DYNAMIC_PC;
4225 gen_helper_retry();
4226 goto jmp_insn;
4227 default:
4228 goto illegal_insn;
4229 }
4230 }
4231 break;
4232 #endif
4233 default:
4234 goto illegal_insn;
4235 }
4236 }
4237 break;
4238 }
4239 break;
4240 case 3: /* load/store instructions */
4241 {
4242 unsigned int xop = GET_FIELD(insn, 7, 12);
4244 /* flush pending conditional evaluations before exposing
4245 cpu state */
4246 if (dc->cc_op != CC_OP_FLAGS) {
4247 dc->cc_op = CC_OP_FLAGS;
4248 gen_helper_compute_psr();
4249 }
4250 cpu_src1 = get_src1(insn, cpu_src1);
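/* casa/casxa take their address from rs1 alone; rs2 holds the
   comparison value and is loaded into cpu_src2 rather than being
   added to the address. */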
4251 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4252 rs2 = GET_FIELD(insn, 27, 31);
4253 gen_movl_reg_TN(rs2, cpu_src2);
4254 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4255 } else if (IS_IMM) { /* immediate */
4256 simm = GET_FIELDs(insn, 19, 31);
4257 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4258 } else { /* register */
4259 rs2 = GET_FIELD(insn, 27, 31);
4260 if (rs2 != 0) {
4261 gen_movl_reg_TN(rs2, cpu_src2);
4262 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4263 } else
4264 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4265 }
4266 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4267 (xop > 0x17 && xop <= 0x1d ) ||
4268 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4269 switch (xop) {
4270 case 0x0: /* ld, V9 lduw, load unsigned word */
4271 gen_address_mask(dc, cpu_addr);
4272 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4273 break;
4274 case 0x1: /* ldub, load unsigned byte */
4275 gen_address_mask(dc, cpu_addr);
4276 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4277 break;
4278 case 0x2: /* lduh, load unsigned halfword */
4279 gen_address_mask(dc, cpu_addr);
4280 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4281 break;
4282 case 0x3: /* ldd, load double word */
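/* ldd fills an even/odd register pair, so an odd rd is illegal.
   The high word of the 64-bit value reaches rd through cpu_val and
   the common writeback below; the low word goes directly to rd + 1. */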
4283 if (rd & 1)
4284 goto illegal_insn;
4285 else {
4286 TCGv_i32 r_const;
4288 save_state(dc, cpu_cond);
4289 r_const = tcg_const_i32(7);
4290 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4291 tcg_temp_free_i32(r_const);
4292 gen_address_mask(dc, cpu_addr);
4293 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4294 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4295 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4296 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4297 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4298 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4299 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4300 }
4301 break;
4302 case 0x9: /* ldsb, load signed byte */
4303 gen_address_mask(dc, cpu_addr);
4304 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4305 break;
4306 case 0xa: /* ldsh, load signed halfword */
4307 gen_address_mask(dc, cpu_addr);
4308 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4309 break;
4310 case 0xd: /* ldstub -- XXX: should be atomic */
4311 {
4312 TCGv r_const;
4314 gen_address_mask(dc, cpu_addr);
4315 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4316 r_const = tcg_const_tl(0xff);
4317 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4318 tcg_temp_free(r_const);
4319 }
4320 break;
4321 case 0x0f: /* swap, swap register with memory -- XXX: should
4322 be atomic */
4323 CHECK_IU_FEATURE(dc, SWAP);
4324 gen_movl_reg_TN(rd, cpu_val);
4325 gen_address_mask(dc, cpu_addr);
4326 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4327 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4328 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4329 break;
4330 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4331 case 0x10: /* lda, V9 lduwa, load word alternate */
4332 #ifndef TARGET_SPARC64
4333 if (IS_IMM)
4334 goto illegal_insn;
4335 if (!supervisor(dc))
4336 goto priv_insn;
4337 #endif
4338 save_state(dc, cpu_cond);
4339 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4340 break;
4341 case 0x11: /* lduba, load unsigned byte alternate */
4342 #ifndef TARGET_SPARC64
4343 if (IS_IMM)
4344 goto illegal_insn;
4345 if (!supervisor(dc))
4346 goto priv_insn;
4347 #endif
4348 save_state(dc, cpu_cond);
4349 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4350 break;
4351 case 0x12: /* lduha, load unsigned halfword alternate */
4352 #ifndef TARGET_SPARC64
4353 if (IS_IMM)
4354 goto illegal_insn;
4355 if (!supervisor(dc))
4356 goto priv_insn;
4357 #endif
4358 save_state(dc, cpu_cond);
4359 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4360 break;
4361 case 0x13: /* ldda, load double word alternate */
4362 #ifndef TARGET_SPARC64
4363 if (IS_IMM)
4364 goto illegal_insn;
4365 if (!supervisor(dc))
4366 goto priv_insn;
4367 #endif
4368 if (rd & 1)
4369 goto illegal_insn;
4370 save_state(dc, cpu_cond);
4371 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4372 goto skip_move;
4373 case 0x19: /* ldsba, load signed byte alternate */
4374 #ifndef TARGET_SPARC64
4375 if (IS_IMM)
4376 goto illegal_insn;
4377 if (!supervisor(dc))
4378 goto priv_insn;
4379 #endif
4380 save_state(dc, cpu_cond);
4381 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4382 break;
4383 case 0x1a: /* ldsha, load signed halfword alternate */
4384 #ifndef TARGET_SPARC64
4385 if (IS_IMM)
4386 goto illegal_insn;
4387 if (!supervisor(dc))
4388 goto priv_insn;
4389 #endif
4390 save_state(dc, cpu_cond);
4391 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4392 break;
4393 case 0x1d: /* ldstuba -- XXX: should be atomic */
4394 #ifndef TARGET_SPARC64
4395 if (IS_IMM)
4396 goto illegal_insn;
4397 if (!supervisor(dc))
4398 goto priv_insn;
4399 #endif
4400 save_state(dc, cpu_cond);
4401 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4402 break;
4403 case 0x1f: /* swapa, swap reg with alternate memory -- XXX:
4404 should be atomic */
4405 CHECK_IU_FEATURE(dc, SWAP);
4406 #ifndef TARGET_SPARC64
4407 if (IS_IMM)
4408 goto illegal_insn;
4409 if (!supervisor(dc))
4410 goto priv_insn;
4411 #endif
4412 save_state(dc, cpu_cond);
4413 gen_movl_reg_TN(rd, cpu_val);
4414 gen_swap_asi(cpu_val, cpu_addr, insn);
4415 break;
4417 #ifndef TARGET_SPARC64
4418 case 0x30: /* ldc */
4419 case 0x31: /* ldcsr */
4420 case 0x33: /* lddc */
4421 goto ncp_insn;
4422 #endif
4423 #endif
4424 #ifdef TARGET_SPARC64
4425 case 0x08: /* V9 ldsw */
4426 gen_address_mask(dc, cpu_addr);
4427 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4428 break;
4429 case 0x0b: /* V9 ldx */
4430 gen_address_mask(dc, cpu_addr);
4431 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4432 break;
4433 case 0x18: /* V9 ldswa */
4434 save_state(dc, cpu_cond);
4435 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4436 break;
4437 case 0x1b: /* V9 ldxa */
4438 save_state(dc, cpu_cond);
4439 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4440 break;
4441 case 0x2d: /* V9 prefetch, no effect */
4442 goto skip_move;
4443 case 0x30: /* V9 ldfa */
4444 save_state(dc, cpu_cond);
4445 gen_ldf_asi(cpu_addr, insn, 4, rd);
4446 goto skip_move;
4447 case 0x33: /* V9 lddfa */
4448 save_state(dc, cpu_cond);
4449 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4450 goto skip_move;
4451 case 0x3d: /* V9 prefetcha, no effect */
4452 goto skip_move;
4453 case 0x32: /* V9 ldqfa */
4454 CHECK_FPU_FEATURE(dc, FLOAT128);
4455 save_state(dc, cpu_cond);
4456 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4457 goto skip_move;
4458 #endif
4459 default:
4460 goto illegal_insn;
4461 }
4462 gen_movl_TN_reg(rd, cpu_val);
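/* Loads whose helpers write the destination themselves (ldda and
   the ASI/FP loads above) jump to skip_move to bypass this generic
   writeback. */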
4463 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4464 skip_move: ;
4465 #endif
4466 } else if (xop >= 0x20 && xop < 0x24) {
4467 if (gen_trap_ifnofpu(dc, cpu_cond))
4468 goto jmp_insn;
4469 save_state(dc, cpu_cond);
4470 switch (xop) {
4471 case 0x20: /* ldf, load fpreg */
4472 gen_address_mask(dc, cpu_addr);
4473 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4474 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4475 break;
4476 case 0x21: /* ldfsr, V9 ldxfsr */
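/* On V9 the rd field selects the form: rd == 1 is the 64-bit
   ldxfsr, rd == 0 the 32-bit ldfsr. */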
4477 #ifdef TARGET_SPARC64
4478 gen_address_mask(dc, cpu_addr);
4479 if (rd == 1) {
4480 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4481 gen_helper_ldxfsr(cpu_tmp64);
4482 } else {
4483 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4484 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4485 gen_helper_ldfsr(cpu_tmp32);
4486 }
4487 #else
4488 {
4489 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4490 gen_helper_ldfsr(cpu_tmp32);
4491 }
4492 #endif
4493 break;
4494 case 0x22: /* ldqf, load quad fpreg */
4495 {
4496 TCGv_i32 r_const;
4498 CHECK_FPU_FEATURE(dc, FLOAT128);
4499 r_const = tcg_const_i32(dc->mem_idx);
4500 gen_address_mask(dc, cpu_addr);
4501 gen_helper_ldqf(cpu_addr, r_const);
4502 tcg_temp_free_i32(r_const);
4503 gen_op_store_QT0_fpr(QFPREG(rd));
4504 }
4505 break;
4506 case 0x23: /* lddf, load double fpreg */
4507 {
4508 TCGv_i32 r_const;
4510 r_const = tcg_const_i32(dc->mem_idx);
4511 gen_address_mask(dc, cpu_addr);
4512 gen_helper_lddf(cpu_addr, r_const);
4513 tcg_temp_free_i32(r_const);
4514 gen_op_store_DT0_fpr(DFPREG(rd));
4515 }
4516 break;
4517 default:
4518 goto illegal_insn;
4519 }
4520 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4521 xop == 0xe || xop == 0x1e) {
4522 gen_movl_reg_TN(rd, cpu_val);
4523 switch (xop) {
4524 case 0x4: /* st, store word */
4525 gen_address_mask(dc, cpu_addr);
4526 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4527 break;
4528 case 0x5: /* stb, store byte */
4529 gen_address_mask(dc, cpu_addr);
4530 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4531 break;
4532 case 0x6: /* sth, store halfword */
4533 gen_address_mask(dc, cpu_addr);
4534 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4535 break;
4536 case 0x7: /* std, store double word */
4537 if (rd & 1)
4538 goto illegal_insn;
4539 else {
4540 TCGv_i32 r_const;
4542 save_state(dc, cpu_cond);
4543 gen_address_mask(dc, cpu_addr);
4544 r_const = tcg_const_i32(7);
4545 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4546 tcg_temp_free_i32(r_const);
4547 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4548 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4549 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4550 }
4551 break;
4552 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4553 case 0x14: /* sta, V9 stwa, store word alternate */
4554 #ifndef TARGET_SPARC64
4555 if (IS_IMM)
4556 goto illegal_insn;
4557 if (!supervisor(dc))
4558 goto priv_insn;
4559 #endif
4560 save_state(dc, cpu_cond);
4561 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4562 dc->npc = DYNAMIC_PC;
4563 break;
4564 case 0x15: /* stba, store byte alternate */
4565 #ifndef TARGET_SPARC64
4566 if (IS_IMM)
4567 goto illegal_insn;
4568 if (!supervisor(dc))
4569 goto priv_insn;
4570 #endif
4571 save_state(dc, cpu_cond);
4572 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4573 dc->npc = DYNAMIC_PC;
4574 break;
4575 case 0x16: /* stha, store halfword alternate */
4576 #ifndef TARGET_SPARC64
4577 if (IS_IMM)
4578 goto illegal_insn;
4579 if (!supervisor(dc))
4580 goto priv_insn;
4581 #endif
4582 save_state(dc, cpu_cond);
4583 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4584 dc->npc = DYNAMIC_PC;
4585 break;
4586 case 0x17: /* stda, store double word alternate */
4587 #ifndef TARGET_SPARC64
4588 if (IS_IMM)
4589 goto illegal_insn;
4590 if (!supervisor(dc))
4591 goto priv_insn;
4592 #endif
4593 if (rd & 1)
4594 goto illegal_insn;
4595 else {
4596 save_state(dc, cpu_cond);
4597 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4598 }
4599 break;
4600 #endif
4601 #ifdef TARGET_SPARC64
4602 case 0x0e: /* V9 stx */
4603 gen_address_mask(dc, cpu_addr);
4604 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4605 break;
4606 case 0x1e: /* V9 stxa */
4607 save_state(dc, cpu_cond);
4608 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4609 dc->npc = DYNAMIC_PC;
4610 break;
4611 #endif
4612 default:
4613 goto illegal_insn;
4614 }
4615 } else if (xop > 0x23 && xop < 0x28) {
4616 if (gen_trap_ifnofpu(dc, cpu_cond))
4617 goto jmp_insn;
4618 save_state(dc, cpu_cond);
4619 switch (xop) {
4620 case 0x24: /* stf, store fpreg */
4621 gen_address_mask(dc, cpu_addr);
4622 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4623 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4624 break;
4625 case 0x25: /* stfsr, V9 stxfsr */
4626 #ifdef TARGET_SPARC64
4627 gen_address_mask(dc, cpu_addr);
4628 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4629 if (rd == 1)
4630 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4631 else
4632 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4633 #else
4634 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4635 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4636 #endif
4637 break;
4638 case 0x26:
4639 #ifdef TARGET_SPARC64
4640 /* V9 stqf, store quad fpreg */
4641 {
4642 TCGv_i32 r_const;
4644 CHECK_FPU_FEATURE(dc, FLOAT128);
4645 gen_op_load_fpr_QT0(QFPREG(rd));
4646 r_const = tcg_const_i32(dc->mem_idx);
4647 gen_address_mask(dc, cpu_addr);
4648 gen_helper_stqf(cpu_addr, r_const);
4649 tcg_temp_free_i32(r_const);
4650 }
4651 break;
4652 #else /* !TARGET_SPARC64 */
4653 /* stdfq, store floating point queue */
4654 #if defined(CONFIG_USER_ONLY)
4655 goto illegal_insn;
4656 #else
4657 if (!supervisor(dc))
4658 goto priv_insn;
4659 if (gen_trap_ifnofpu(dc, cpu_cond))
4660 goto jmp_insn;
4661 goto nfq_insn;
4662 #endif
4663 #endif
4664 case 0x27: /* stdf, store double fpreg */
4665 {
4666 TCGv_i32 r_const;
4668 gen_op_load_fpr_DT0(DFPREG(rd));
4669 r_const = tcg_const_i32(dc->mem_idx);
4670 gen_address_mask(dc, cpu_addr);
4671 gen_helper_stdf(cpu_addr, r_const);
4672 tcg_temp_free_i32(r_const);
4673 }
4674 break;
4675 default:
4676 goto illegal_insn;
4677 }
4678 } else if (xop > 0x33 && xop < 0x3f) {
4679 save_state(dc, cpu_cond);
4680 switch (xop) {
4681 #ifdef TARGET_SPARC64
4682 case 0x34: /* V9 stfa */
4683 gen_stf_asi(cpu_addr, insn, 4, rd);
4684 break;
4685 case 0x36: /* V9 stqfa */
4686 {
4687 TCGv_i32 r_const;
4689 CHECK_FPU_FEATURE(dc, FLOAT128);
4690 r_const = tcg_const_i32(7);
4691 gen_helper_check_align(cpu_addr, r_const);
4692 tcg_temp_free_i32(r_const);
4693 gen_op_load_fpr_QT0(QFPREG(rd));
4694 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4695 }
4696 break;
4697 case 0x37: /* V9 stdfa */
4698 gen_op_load_fpr_DT0(DFPREG(rd));
4699 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4700 break;
4701 case 0x3c: /* V9 casa */
4702 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4703 gen_movl_TN_reg(rd, cpu_val);
4704 break;
4705 case 0x3e: /* V9 casxa */
4706 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4707 gen_movl_TN_reg(rd, cpu_val);
4708 break;
4709 #else
4710 case 0x34: /* stc */
4711 case 0x35: /* stcsr */
4712 case 0x36: /* stdcq */
4713 case 0x37: /* stdc */
4714 goto ncp_insn;
4715 #endif
4716 default:
4717 goto illegal_insn;
4718 }
4719 } else
4720 goto illegal_insn;
4721 }
4722 break;
4723 }
4724 /* default case for non-jump instructions */
4725 if (dc->npc == DYNAMIC_PC) {
4726 dc->pc = DYNAMIC_PC;
4727 gen_op_next_insn();
4728 } else if (dc->npc == JUMP_PC) {
4729 /* we can do a static jump */
4730 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4731 dc->is_br = 1;
4732 } else {
4733 dc->pc = dc->npc;
4734 dc->npc = dc->npc + 4;
4735 }
4736 jmp_insn:
4737 goto egress;
4738 illegal_insn:
4739 {
4740 TCGv_i32 r_const;
4742 save_state(dc, cpu_cond);
4743 r_const = tcg_const_i32(TT_ILL_INSN);
4744 gen_helper_raise_exception(r_const);
4745 tcg_temp_free_i32(r_const);
4746 dc->is_br = 1;
4747 }
4748 goto egress;
4749 unimp_flush:
4750 {
4751 TCGv_i32 r_const;
4753 save_state(dc, cpu_cond);
4754 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4755 gen_helper_raise_exception(r_const);
4756 tcg_temp_free_i32(r_const);
4757 dc->is_br = 1;
4758 }
4759 goto egress;
4760 #if !defined(CONFIG_USER_ONLY)
4761 priv_insn:
4762 {
4763 TCGv_i32 r_const;
4765 save_state(dc, cpu_cond);
4766 r_const = tcg_const_i32(TT_PRIV_INSN);
4767 gen_helper_raise_exception(r_const);
4768 tcg_temp_free_i32(r_const);
4769 dc->is_br = 1;
4770 }
4771 goto egress;
4772 #endif
4773 nfpu_insn:
4774 save_state(dc, cpu_cond);
4775 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4776 dc->is_br = 1;
4777 goto egress;
4778 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4779 nfq_insn:
4780 save_state(dc, cpu_cond);
4781 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4782 dc->is_br = 1;
4783 goto egress;
4784 #endif
4785 #ifndef TARGET_SPARC64
4786 ncp_insn:
4787 {
4788 TCGv_i32 r_const;
4790 save_state(dc, cpu_cond);
4791 r_const = tcg_const_i32(TT_NCP_INSN);
4792 gen_helper_raise_exception(r_const);
4793 tcg_temp_free_i32(r_const);
4794 dc->is_br = 1;
4795 }
4796 goto egress;
4797 #endif
4798 egress:
4799 tcg_temp_free(cpu_tmp1);
4800 tcg_temp_free(cpu_tmp2);
4801 }
4803 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4804 int spc, CPUSPARCState *env)
4805 {
4806 target_ulong pc_start, last_pc;
4807 uint16_t *gen_opc_end;
4808 DisasContext dc1, *dc = &dc1;
4809 CPUBreakpoint *bp;
4810 int j, lj = -1;
4811 int num_insns;
4812 int max_insns;
4814 memset(dc, 0, sizeof(DisasContext));
4815 dc->tb = tb;
4816 pc_start = tb->pc;
4817 dc->pc = pc_start;
4818 last_pc = dc->pc;
4819 dc->npc = (target_ulong) tb->cs_base;
4820 dc->cc_op = CC_OP_DYNAMIC;
4821 dc->mem_idx = cpu_mmu_index(env);
4822 dc->def = env->def;
4823 if ((dc->def->features & CPU_FEATURE_FLOAT))
4824 dc->fpu_enabled = cpu_fpu_enabled(env);
4825 else
4826 dc->fpu_enabled = 0;
4827 #ifdef TARGET_SPARC64
4828 dc->address_mask_32bit = env->pstate & PS_AM;
4829 #endif
4830 dc->singlestep = (env->singlestep_enabled || singlestep);
4831 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4833 cpu_tmp0 = tcg_temp_new();
4834 cpu_tmp32 = tcg_temp_new_i32();
4835 cpu_tmp64 = tcg_temp_new_i64();
4837 cpu_dst = tcg_temp_local_new();
4839 // loads and stores
4840 cpu_val = tcg_temp_local_new();
4841 cpu_addr = tcg_temp_local_new();
4843 num_insns = 0;
4844 max_insns = tb->cflags & CF_COUNT_MASK;
4845 if (max_insns == 0)
4846 max_insns = CF_COUNT_MASK;
4847 gen_icount_start();
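/* Translate one guest instruction per iteration; the loop stops at
   a branch (is_br), a non-sequential PC, a page boundary, single
   stepping, or when the op buffer or instruction budget is
   exhausted. */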
4848 do {
4849 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4850 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4851 if (bp->pc == dc->pc) {
4852 if (dc->pc != pc_start)
4853 save_state(dc, cpu_cond);
4854 gen_helper_debug();
4855 tcg_gen_exit_tb(0);
4856 dc->is_br = 1;
4857 goto exit_gen_loop;
4858 }
4859 }
4860 }
4861 if (spc) {
4862 qemu_log("Search PC...\n");
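/* In search-PC mode, record pc, npc and icount for every op so
   gen_pc_load() can rebuild the guest state that corresponds to a
   host PC after a fault. */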
4863 j = gen_opc_ptr - gen_opc_buf;
4864 if (lj < j) {
4865 lj++;
4866 while (lj < j)
4867 gen_opc_instr_start[lj++] = 0;
4868 gen_opc_pc[lj] = dc->pc;
4869 gen_opc_npc[lj] = dc->npc;
4870 gen_opc_instr_start[lj] = 1;
4871 gen_opc_icount[lj] = num_insns;
4872 }
4873 }
4874 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4875 gen_io_start();
4876 last_pc = dc->pc;
4877 disas_sparc_insn(dc);
4878 num_insns++;
4880 if (dc->is_br)
4881 break;
4882 /* if the next PC is different, we abort now */
4883 if (dc->pc != (last_pc + 4))
4884 break;
4885 /* if we reach a page boundary, we stop generation so that the
4886 PC of a TT_TFAULT exception is always in the right page */
4887 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4888 break;
4889 /* if single step mode, we generate only one instruction and
4890 generate an exception */
4891 if (dc->singlestep) {
4892 break;
4893 }
4894 } while ((gen_opc_ptr < gen_opc_end) &&
4895 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4896 num_insns < max_insns);
4898 exit_gen_loop:
4899 tcg_temp_free(cpu_addr);
4900 tcg_temp_free(cpu_val);
4901 tcg_temp_free(cpu_dst);
4902 tcg_temp_free_i64(cpu_tmp64);
4903 tcg_temp_free_i32(cpu_tmp32);
4904 tcg_temp_free(cpu_tmp0);
4905 if (tb->cflags & CF_LAST_IO)
4906 gen_io_end();
4907 if (!dc->is_br) {
4908 if (dc->pc != DYNAMIC_PC &&
4909 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4910 /* static PC and NPC: we can use direct chaining */
4911 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4912 } else {
4913 if (dc->pc != DYNAMIC_PC)
4914 tcg_gen_movi_tl(cpu_pc, dc->pc);
4915 save_npc(dc, cpu_cond);
4916 tcg_gen_exit_tb(0);
4917 }
4918 }
4919 gen_icount_end(tb, num_insns);
4920 *gen_opc_ptr = INDEX_op_end;
4921 if (spc) {
4922 j = gen_opc_ptr - gen_opc_buf;
4923 lj++;
4924 while (lj <= j)
4925 gen_opc_instr_start[lj++] = 0;
4926 #if 0
4927 log_page_dump();
4928 #endif
4929 gen_opc_jump_pc[0] = dc->jump_pc[0];
4930 gen_opc_jump_pc[1] = dc->jump_pc[1];
4931 } else {
4932 tb->size = last_pc + 4 - pc_start;
4933 tb->icount = num_insns;
4934 }
4935 #ifdef DEBUG_DISAS
4936 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4937 qemu_log("--------------\n");
4938 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4939 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4940 qemu_log("\n");
4941 }
4942 #endif
4943 }
4945 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4946 {
4947 gen_intermediate_code_internal(tb, 0, env);
4948 }
4950 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4951 {
4952 gen_intermediate_code_internal(tb, 1, env);
4953 }
4955 void gen_intermediate_code_init(CPUSPARCState *env)
4956 {
4957 unsigned int i;
4958 static int inited;
4959 static const char * const gregnames[8] = {
4960 NULL, // g0 not used
4961 "g1",
4962 "g2",
4963 "g3",
4964 "g4",
4965 "g5",
4966 "g6",
4967 "g7",
4969 static const char * const fregnames[64] = {
4970 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4971 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4972 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4973 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4974 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4975 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4976 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4977 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4980 /* init various static tables */
4981 if (!inited) {
4982 inited = 1;
4984 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4985 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4986 offsetof(CPUState, regwptr),
4987 "regwptr");
4988 #ifdef TARGET_SPARC64
4989 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4990 "xcc");
4991 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4992 "asi");
4993 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4994 "fprs");
4995 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4996 "gsr");
4997 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4998 offsetof(CPUState, tick_cmpr),
4999 "tick_cmpr");
5000 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5001 offsetof(CPUState, stick_cmpr),
5002 "stick_cmpr");
5003 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5004 offsetof(CPUState, hstick_cmpr),
5005 "hstick_cmpr");
5006 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5007 "hintp");
5008 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5009 "htba");
5010 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5011 "hver");
5012 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5013 offsetof(CPUState, ssr), "ssr");
5014 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5015 offsetof(CPUState, version), "ver");
5016 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5017 offsetof(CPUState, softint),
5018 "softint");
5019 #else
5020 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5021 "wim");
5022 #endif
5023 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5024 "cond");
5025 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5026 "cc_src");
5027 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5028 offsetof(CPUState, cc_src2),
5029 "cc_src2");
5030 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5031 "cc_dst");
5032 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5033 "cc_op");
5034 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5035 "psr");
5036 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5037 "fsr");
5038 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5039 "pc");
5040 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5041 "npc");
5042 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5043 #ifndef CONFIG_USER_ONLY
5044 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5045 "tbr");
5046 #endif
5047 for (i = 1; i < 8; i++)
5048 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5049 offsetof(CPUState, gregs[i]),
5050 gregnames[i]);
5051 for (i = 0; i < TARGET_FPREGS; i++)
5052 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5053 offsetof(CPUState, fpr[i]),
5054 fregnames[i]);
5056 /* register helpers */
5058 #define GEN_HELPER 2
5059 #include "helper.h"
5060 }
5061 }
5063 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5064 unsigned long searched_pc, int pc_pos, void *puc)
5065 {
5066 target_ulong npc;
5067 env->pc = gen_opc_pc[pc_pos];
5068 npc = gen_opc_npc[pc_pos];
5069 if (npc == DYNAMIC_PC) {
5070 /* dynamic NPC: already stored */
5071 } else if (npc == JUMP_PC) {
5072 /* jump PC: use 'cond' and the jump targets of the translation */
5073 if (env->cond) {
5074 env->npc = gen_opc_jump_pc[0];
5075 } else {
5076 env->npc = gen_opc_jump_pc[1];
5077 }
5078 } else {
5079 env->npc = npc;
5080 }
5082 /* flush pending conditional evaluations before exposing cpu state */
5083 if (CC_OP != CC_OP_FLAGS) {
5084 helper_compute_psr();
5085 }
5086 }