SPARC: Emulation of Leon3
[qemu.git] / target-sparc / translate.c
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
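
/* Worked example of the non-native bit order: in a format-2 branch the
   condition field occupies architectural bits 28..25, which in this
   big-endian numbering is GET_FIELD(insn, 3, 6): shift right by
   31 - 6 = 25, mask with (1 << 4) - 1 = 0xf.  For insn = 0x10800001
   ("ba +4") this yields 0x8, the "always" condition that do_branch()
   below treats as unconditionally taken. */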
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
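
/* Example: V9 reuses the low encoding bit as bit 5 of the register
   number, so a double-precision rd field of 3 (binary 00011) maps to
   DFPREG(3) = (1 << 5) | 2 = 34, i.e. %f34; on 32-bit SPARC the same
   field simply masks down to %f2. */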
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
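
/* Example: a 22-bit branch displacement of 0x3fffff is negative;
   sign_extend(0x3fffff, 22) shifts left by 32 - 22 = 10 and back down
   arithmetically, giving -1, i.e. a branch offset of -4 once the
   caller shifts it left by 2. */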
#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
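
/* Example: with PSTATE.AM set on a 64-bit CPU, an address such as
   0xffffffff80001000 is truncated by the andi above to 0x80001000;
   32-bit ABI user emulation defines AM_CHECK as 1, so there every
   address is masked unconditionally. */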
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
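
/* Example: for a taken branch at 0x40001000 targeting 0x40001010, both
   pc and npc stay inside the TB's page, so the translated block is
   chained directly via tcg_gen_goto_tb(); a cross-page target falls
   back to exit_tb(0) so the guest MMU mapping is re-checked. */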
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
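
/* Example of the (dst < src) trick: 0xffffffff + 1 wraps to 0, and
   0 < 0xffffffff (unsigned) detects the carry out of bit 31, so no
   33-bit arithmetic is needed to recover the carry flag. */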
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
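
/* MULScc is one shift-and-add step of the iterative 32-bit multiply:
   if the low bit of %y is clear the addend is forced to 0, the low bit
   of the partial product is shifted into %y, and (N ^ V) is shifted
   into bit 31.  Software issues a sequence of MULScc steps to build up
   the full 64-bit product in %y and the destination register. */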
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
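
/* The two brcondi checks in gen_op_sdivx() guard the one overflowing
   case of 64-bit signed division: INT64_MIN / -1, whose true quotient
   2^63 is not representable, so the result is pinned to INT64_MIN
   instead of letting the host division fault. */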
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
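
/* Example: fcc0 lives at FSR bits 11:10 and fcc1 at bits 33:32, so
   gen_fcond() below passes fcc_offset = 32 - 10 = 22 for fcc1; the
   same two helpers then shift from FSR_FCC0_SHIFT + 22 = 32 and
   FSR_FCC1_SHIFT + 22 = 33. */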
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
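
/* Example of the delayed-branch pipeline being modelled here: after
   "bne L; add ...", the branch only sets npc, the add in the delay
   slot executes with pc = old npc, and gen_op_next_insn() advances the
   pair (pc, npc) to (npc, npc + 4) for sequential code. */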
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
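
/* LDSTUB is SPARC's test-and-set primitive: the pair of helpers above
   returns the old byte in dst and unconditionally stores 0xff, which
   is how spin locks are built on this architecture (the lock was
   already held iff the value read back is 0xff). */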
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
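
/* Example: for "add %g1, -5, %g2" bit 13 (IS_IMM) is set, and
   GET_FIELDs(insn, 19, 31) sign-extends the 13-bit simm13 field to -5.
   A source field of 0 (%g0) always reads as constant zero, hence the
   movi instead of a register access. */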
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif

#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1867 /* before an instruction, dc->pc must be static */
1868 static void disas_sparc_insn(DisasContext * dc)
1870 unsigned int insn, opc, rs1, rs2, rd;
1871 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1872 target_long simm;
1874 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1875 tcg_gen_debug_insn_start(dc->pc);
1876 insn = ldl_code(dc->pc);
1877 opc = GET_FIELD(insn, 0, 1);
1879 rd = GET_FIELD(insn, 2, 6);
1881 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1882 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1884 switch (opc) {
1885 case 0: /* branches/sethi */
1887 unsigned int xop = GET_FIELD(insn, 7, 9);
1888 int32_t target;
1889 switch (xop) {
1890 #ifdef TARGET_SPARC64
1891 case 0x1: /* V9 BPcc */
1893 int cc;
1895 target = GET_FIELD_SP(insn, 0, 18);
1896 target = sign_extend(target, 18);
1897 target <<= 2;
1898 cc = GET_FIELD_SP(insn, 20, 21);
1899 if (cc == 0)
1900 do_branch(dc, target, insn, 0, cpu_cond);
1901 else if (cc == 2)
1902 do_branch(dc, target, insn, 1, cpu_cond);
1903 else
1904 goto illegal_insn;
1905 goto jmp_insn;
1907 case 0x3: /* V9 BPr */
1909 target = GET_FIELD_SP(insn, 0, 13) |
1910 (GET_FIELD_SP(insn, 20, 21) << 14);
1911 target = sign_extend(target, 16);
1912 target <<= 2;
1913 cpu_src1 = get_src1(insn, cpu_src1);
1914 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1915 goto jmp_insn;
1917 case 0x5: /* V9 FBPcc */
1919 int cc = GET_FIELD_SP(insn, 20, 21);
1920 if (gen_trap_ifnofpu(dc, cpu_cond))
1921 goto jmp_insn;
1922 target = GET_FIELD_SP(insn, 0, 18);
1923 target = sign_extend(target, 19);
1924 target <<= 2;
1925 do_fbranch(dc, target, insn, cc, cpu_cond);
1926 goto jmp_insn;
1928 #else
1929 case 0x7: /* CBN+x */
1931 goto ncp_insn;
1933 #endif
1934 case 0x2: /* BN+x */
1936 target = GET_FIELD(insn, 10, 31);
1937 target = sign_extend(target, 22);
1938 target <<= 2;
1939 do_branch(dc, target, insn, 0, cpu_cond);
1940 goto jmp_insn;
1942 case 0x6: /* FBN+x */
1944 if (gen_trap_ifnofpu(dc, cpu_cond))
1945 goto jmp_insn;
1946 target = GET_FIELD(insn, 10, 31);
1947 target = sign_extend(target, 22);
1948 target <<= 2;
1949 do_fbranch(dc, target, insn, 0, cpu_cond);
1950 goto jmp_insn;
1952 case 0x4: /* SETHI */
1953 if (rd) { // nop
1954 uint32_t value = GET_FIELD(insn, 10, 31);
1955 TCGv r_const;
1957 r_const = tcg_const_tl(value << 10);
1958 gen_movl_TN_reg(rd, r_const);
1959 tcg_temp_free(r_const);
1961 break;
1962 case 0x0: /* UNIMPL */
1963 default:
1964 goto illegal_insn;
1966 break;
1968 break;
1969 case 1: /*CALL*/
1971 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1972 TCGv r_const;
1974 r_const = tcg_const_tl(dc->pc);
1975 gen_movl_TN_reg(15, r_const);
1976 tcg_temp_free(r_const);
1977 target += dc->pc;
1978 gen_mov_pc_npc(dc, cpu_cond);
1979 dc->npc = target;
1981 goto jmp_insn;
1982 case 2: /* FPU & Logical Operations */
1984 unsigned int xop = GET_FIELD(insn, 7, 12);
1985 if (xop == 0x3a) { /* generate trap */
1986 int cond;
1988 cpu_src1 = get_src1(insn, cpu_src1);
1989 if (IS_IMM) {
1990 rs2 = GET_FIELD(insn, 25, 31);
1991 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1992 } else {
1993 rs2 = GET_FIELD(insn, 27, 31);
1994 if (rs2 != 0) {
1995 gen_movl_reg_TN(rs2, cpu_src2);
1996 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1997 } else
1998 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2001 cond = GET_FIELD(insn, 3, 6);
2002 if (cond == 0x8) { /* Trap Always */
2003 save_state(dc, cpu_cond);
2004 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2005 supervisor(dc))
2006 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2007 else
2008 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2009 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2010 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2012 if (rs2 == 0 &&
2013 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2015 gen_helper_shutdown();
2017 } else {
2018 gen_helper_raise_exception(cpu_tmp32);
2020 } else if (cond != 0) {
2021 TCGv r_cond = tcg_temp_new();
2022 int l1;
2023 #ifdef TARGET_SPARC64
2024 /* V9 icc/xcc */
2025 int cc = GET_FIELD_SP(insn, 11, 12);
2027 save_state(dc, cpu_cond);
2028 if (cc == 0)
2029 gen_cond(r_cond, 0, cond, dc);
2030 else if (cc == 2)
2031 gen_cond(r_cond, 1, cond, dc);
2032 else
2033 goto illegal_insn;
2034 #else
2035 save_state(dc, cpu_cond);
2036 gen_cond(r_cond, 0, cond, dc);
2037 #endif
2038 l1 = gen_new_label();
2039 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2041 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2042 supervisor(dc))
2043 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2044 else
2045 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2046 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2047 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2048 gen_helper_raise_exception(cpu_tmp32);
2050 gen_set_label(l1);
2051 tcg_temp_free(r_cond);
2053 gen_op_next_insn();
2054 tcg_gen_exit_tb(0);
2055 dc->is_br = 1;
2056 goto jmp_insn;
2057 } else if (xop == 0x28) {
2058 rs1 = GET_FIELD(insn, 13, 17);
2059 switch(rs1) {
2060 case 0: /* rdy */
2061 #ifndef TARGET_SPARC64
2062 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2063 manual, rdy on the microSPARC
2064 II */
2065 case 0x0f: /* stbar in the SPARCv8 manual,
2066 rdy on the microSPARC II */
2067 case 0x10 ... 0x1f: /* implementation-dependent in the
2068 SPARCv8 manual, rdy on the
2069 microSPARC II */
2070 #endif
2071 gen_movl_TN_reg(rd, cpu_y);
2072 break;
2073 #ifdef TARGET_SPARC64
2074 case 0x2: /* V9 rdccr */
2075 gen_helper_compute_psr();
2076 gen_helper_rdccr(cpu_dst);
2077 gen_movl_TN_reg(rd, cpu_dst);
2078 break;
2079 case 0x3: /* V9 rdasi */
2080 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2081 gen_movl_TN_reg(rd, cpu_dst);
2082 break;
2083 case 0x4: /* V9 rdtick */
2085 TCGv_ptr r_tickptr;
2087 r_tickptr = tcg_temp_new_ptr();
2088 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2089 offsetof(CPUState, tick));
2090 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2091 tcg_temp_free_ptr(r_tickptr);
2092 gen_movl_TN_reg(rd, cpu_dst);
2094 break;
2095 case 0x5: /* V9 rdpc */
2097 TCGv r_const;
2099 r_const = tcg_const_tl(dc->pc);
2100 gen_movl_TN_reg(rd, r_const);
2101 tcg_temp_free(r_const);
2103 break;
2104 case 0x6: /* V9 rdfprs */
2105 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2106 gen_movl_TN_reg(rd, cpu_dst);
2107 break;
2108 case 0xf: /* V9 membar */
2109 break; /* no effect */
2110 case 0x13: /* Graphics Status */
2111 if (gen_trap_ifnofpu(dc, cpu_cond))
2112 goto jmp_insn;
2113 gen_movl_TN_reg(rd, cpu_gsr);
2114 break;
2115 case 0x16: /* Softint */
2116 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2117 gen_movl_TN_reg(rd, cpu_dst);
2118 break;
2119 case 0x17: /* Tick compare */
2120 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2121 break;
2122 case 0x18: /* System tick */
2124 TCGv_ptr r_tickptr;
2126 r_tickptr = tcg_temp_new_ptr();
2127 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2128 offsetof(CPUState, stick));
2129 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2130 tcg_temp_free_ptr(r_tickptr);
2131 gen_movl_TN_reg(rd, cpu_dst);
2133 break;
2134 case 0x19: /* System tick compare */
2135 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2136 break;
2137 case 0x10: /* Performance Control */
2138 case 0x11: /* Performance Instrumentation Counter */
2139 case 0x12: /* Dispatch Control */
2140 case 0x14: /* Softint set, WO */
2141 case 0x15: /* Softint clear, WO */
2142 #endif
2143 default:
2144 goto illegal_insn;
2146 #if !defined(CONFIG_USER_ONLY)
2147 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2148 #ifndef TARGET_SPARC64
2149 if (!supervisor(dc))
2150 goto priv_insn;
2151 gen_helper_compute_psr();
2152 dc->cc_op = CC_OP_FLAGS;
2153 gen_helper_rdpsr(cpu_dst);
2154 #else
2155 CHECK_IU_FEATURE(dc, HYPV);
2156 if (!hypervisor(dc))
2157 goto priv_insn;
2158 rs1 = GET_FIELD(insn, 13, 17);
2159 switch (rs1) {
2160 case 0: // hpstate
2161 // gen_op_rdhpstate();
2162 break;
2163 case 1: // htstate
2164 // gen_op_rdhtstate();
2165 break;
2166 case 3: // hintp
2167 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2168 break;
2169 case 5: // htba
2170 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2171 break;
2172 case 6: // hver
2173 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2174 break;
2175 case 31: // hstick_cmpr
2176 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2177 break;
2178 default:
2179 goto illegal_insn;
2181 #endif
2182 gen_movl_TN_reg(rd, cpu_dst);
2183 break;
2184 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2185 if (!supervisor(dc))
2186 goto priv_insn;
2187 #ifdef TARGET_SPARC64
2188 rs1 = GET_FIELD(insn, 13, 17);
2189 switch (rs1) {
2190 case 0: // tpc
2192 TCGv_ptr r_tsptr;
2194 r_tsptr = tcg_temp_new_ptr();
2195 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2196 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2197 offsetof(trap_state, tpc));
2198 tcg_temp_free_ptr(r_tsptr);
2200 break;
2201 case 1: // tnpc
2203 TCGv_ptr r_tsptr;
2205 r_tsptr = tcg_temp_new_ptr();
2206 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2207 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2208 offsetof(trap_state, tnpc));
2209 tcg_temp_free_ptr(r_tsptr);
2211 break;
2212 case 2: // tstate
2214 TCGv_ptr r_tsptr;
2216 r_tsptr = tcg_temp_new_ptr();
2217 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2218 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2219 offsetof(trap_state, tstate));
2220 tcg_temp_free_ptr(r_tsptr);
2222 break;
2223 case 3: // tt
2225 TCGv_ptr r_tsptr;
2227 r_tsptr = tcg_temp_new_ptr();
2228 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2229 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2230 offsetof(trap_state, tt));
2231 tcg_temp_free_ptr(r_tsptr);
2232 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2234 break;
2235 case 4: // tick
2237 TCGv_ptr r_tickptr;
2239 r_tickptr = tcg_temp_new_ptr();
2240 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2241 offsetof(CPUState, tick));
2242 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2243 gen_movl_TN_reg(rd, cpu_tmp0);
2244 tcg_temp_free_ptr(r_tickptr);
2245 }
2246 break;
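/* The tpc/tnpc/tstate/tt cases above all read through
   gen_load_trap_state_at_tl(), which emits code computing a host
   pointer to the trap-state window selected by the run-time trap
   level, roughly (a sketch; MAXTL_MASK is the mask assumed from
   cpu.h):

       trap_state *tsptr = &env->ts[env->tl & MAXTL_MASK];

   so a single generated load serves whatever TL is live. */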
2247 case 5: // tba
2248 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2249 break;
2250 case 6: // pstate
2251 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2252 offsetof(CPUSPARCState, pstate));
2253 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2254 break;
2255 case 7: // tl
2256 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2257 offsetof(CPUSPARCState, tl));
2258 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2259 break;
2260 case 8: // pil
2261 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2262 offsetof(CPUSPARCState, psrpil));
2263 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2264 break;
2265 case 9: // cwp
2266 gen_helper_rdcwp(cpu_tmp0);
2267 break;
2268 case 10: // cansave
2269 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2270 offsetof(CPUSPARCState, cansave));
2271 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2272 break;
2273 case 11: // canrestore
2274 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2275 offsetof(CPUSPARCState, canrestore));
2276 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2277 break;
2278 case 12: // cleanwin
2279 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2280 offsetof(CPUSPARCState, cleanwin));
2281 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2282 break;
2283 case 13: // otherwin
2284 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2285 offsetof(CPUSPARCState, otherwin));
2286 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2287 break;
2288 case 14: // wstate
2289 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2290 offsetof(CPUSPARCState, wstate));
2291 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2292 break;
2293 case 16: // UA2005 gl
2294 CHECK_IU_FEATURE(dc, GL);
2295 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2296 offsetof(CPUSPARCState, gl));
2297 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2298 break;
2299 case 26: // UA2005 strand status
2300 CHECK_IU_FEATURE(dc, HYPV);
2301 if (!hypervisor(dc))
2302 goto priv_insn;
2303 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2304 break;
2305 case 31: // ver
2306 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2307 break;
2308 case 15: // fq
2309 default:
2310 goto illegal_insn;
2311 }
2312 #else
2313 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2314 #endif
2315 gen_movl_TN_reg(rd, cpu_tmp0);
2316 break;
2317 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2318 #ifdef TARGET_SPARC64
2319 save_state(dc, cpu_cond);
2320 gen_helper_flushw();
2321 #else
2322 if (!supervisor(dc))
2323 goto priv_insn;
2324 gen_movl_TN_reg(rd, cpu_tbr);
2325 #endif
2326 break;
2327 #endif
2328 } else if (xop == 0x34) { /* FPU Operations */
2329 if (gen_trap_ifnofpu(dc, cpu_cond))
2330 goto jmp_insn;
2331 gen_op_clear_ieee_excp_and_FTT();
2332 rs1 = GET_FIELD(insn, 13, 17);
2333 rs2 = GET_FIELD(insn, 27, 31);
2334 xop = GET_FIELD(insn, 18, 26);
2335 save_state(dc, cpu_cond);
2336 switch (xop) {
2337 case 0x1: /* fmovs */
2338 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2339 break;
2340 case 0x5: /* fnegs */
2341 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2342 break;
2343 case 0x9: /* fabss */
2344 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2345 break;
2346 case 0x29: /* fsqrts */
2347 CHECK_FPU_FEATURE(dc, FSQRT);
2348 gen_clear_float_exceptions();
2349 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2350 gen_helper_check_ieee_exceptions();
2351 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2352 break;
2353 case 0x2a: /* fsqrtd */
2354 CHECK_FPU_FEATURE(dc, FSQRT);
2355 gen_op_load_fpr_DT1(DFPREG(rs2));
2356 gen_clear_float_exceptions();
2357 gen_helper_fsqrtd();
2358 gen_helper_check_ieee_exceptions();
2359 gen_op_store_DT0_fpr(DFPREG(rd));
2360 break;
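/* Double- and quad-precision operands are staged through env
   temporaries (DT0/DT1, QT0/QT1) because the FP file is modelled as
   32-bit TCG globals.  gen_op_load_fpr_DT1(DFPREG(rs2)) above is
   roughly (a sketch; field names are illustrative):

       env->dt1.upper = fpr[DFPREG(rs2)];
       env->dt1.lower = fpr[DFPREG(rs2) + 1];

   and gen_op_store_DT0_fpr() copies the result back the same way. */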
2361 case 0x2b: /* fsqrtq */
2362 CHECK_FPU_FEATURE(dc, FLOAT128);
2363 gen_op_load_fpr_QT1(QFPREG(rs2));
2364 gen_clear_float_exceptions();
2365 gen_helper_fsqrtq();
2366 gen_helper_check_ieee_exceptions();
2367 gen_op_store_QT0_fpr(QFPREG(rd));
2368 break;
2369 case 0x41: /* fadds */
2370 gen_clear_float_exceptions();
2371 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2372 gen_helper_check_ieee_exceptions();
2373 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2374 break;
2375 case 0x42: /* faddd */
2376 gen_op_load_fpr_DT0(DFPREG(rs1));
2377 gen_op_load_fpr_DT1(DFPREG(rs2));
2378 gen_clear_float_exceptions();
2379 gen_helper_faddd();
2380 gen_helper_check_ieee_exceptions();
2381 gen_op_store_DT0_fpr(DFPREG(rd));
2382 break;
2383 case 0x43: /* faddq */
2384 CHECK_FPU_FEATURE(dc, FLOAT128);
2385 gen_op_load_fpr_QT0(QFPREG(rs1));
2386 gen_op_load_fpr_QT1(QFPREG(rs2));
2387 gen_clear_float_exceptions();
2388 gen_helper_faddq();
2389 gen_helper_check_ieee_exceptions();
2390 gen_op_store_QT0_fpr(QFPREG(rd));
2391 break;
2392 case 0x45: /* fsubs */
2393 gen_clear_float_exceptions();
2394 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2395 gen_helper_check_ieee_exceptions();
2396 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2397 break;
2398 case 0x46: /* fsubd */
2399 gen_op_load_fpr_DT0(DFPREG(rs1));
2400 gen_op_load_fpr_DT1(DFPREG(rs2));
2401 gen_clear_float_exceptions();
2402 gen_helper_fsubd();
2403 gen_helper_check_ieee_exceptions();
2404 gen_op_store_DT0_fpr(DFPREG(rd));
2405 break;
2406 case 0x47: /* fsubq */
2407 CHECK_FPU_FEATURE(dc, FLOAT128);
2408 gen_op_load_fpr_QT0(QFPREG(rs1));
2409 gen_op_load_fpr_QT1(QFPREG(rs2));
2410 gen_clear_float_exceptions();
2411 gen_helper_fsubq();
2412 gen_helper_check_ieee_exceptions();
2413 gen_op_store_QT0_fpr(QFPREG(rd));
2414 break;
2415 case 0x49: /* fmuls */
2416 CHECK_FPU_FEATURE(dc, FMUL);
2417 gen_clear_float_exceptions();
2418 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2419 gen_helper_check_ieee_exceptions();
2420 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2421 break;
2422 case 0x4a: /* fmuld */
2423 CHECK_FPU_FEATURE(dc, FMUL);
2424 gen_op_load_fpr_DT0(DFPREG(rs1));
2425 gen_op_load_fpr_DT1(DFPREG(rs2));
2426 gen_clear_float_exceptions();
2427 gen_helper_fmuld();
2428 gen_helper_check_ieee_exceptions();
2429 gen_op_store_DT0_fpr(DFPREG(rd));
2430 break;
2431 case 0x4b: /* fmulq */
2432 CHECK_FPU_FEATURE(dc, FLOAT128);
2433 CHECK_FPU_FEATURE(dc, FMUL);
2434 gen_op_load_fpr_QT0(QFPREG(rs1));
2435 gen_op_load_fpr_QT1(QFPREG(rs2));
2436 gen_clear_float_exceptions();
2437 gen_helper_fmulq();
2438 gen_helper_check_ieee_exceptions();
2439 gen_op_store_QT0_fpr(QFPREG(rd));
2440 break;
2441 case 0x4d: /* fdivs */
2442 gen_clear_float_exceptions();
2443 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2444 gen_helper_check_ieee_exceptions();
2445 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2446 break;
2447 case 0x4e: /* fdivd */
2448 gen_op_load_fpr_DT0(DFPREG(rs1));
2449 gen_op_load_fpr_DT1(DFPREG(rs2));
2450 gen_clear_float_exceptions();
2451 gen_helper_fdivd();
2452 gen_helper_check_ieee_exceptions();
2453 gen_op_store_DT0_fpr(DFPREG(rd));
2454 break;
2455 case 0x4f: /* fdivq */
2456 CHECK_FPU_FEATURE(dc, FLOAT128);
2457 gen_op_load_fpr_QT0(QFPREG(rs1));
2458 gen_op_load_fpr_QT1(QFPREG(rs2));
2459 gen_clear_float_exceptions();
2460 gen_helper_fdivq();
2461 gen_helper_check_ieee_exceptions();
2462 gen_op_store_QT0_fpr(QFPREG(rd));
2463 break;
2464 case 0x69: /* fsmuld */
2465 CHECK_FPU_FEATURE(dc, FSMULD);
2466 gen_clear_float_exceptions();
2467 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2468 gen_helper_check_ieee_exceptions();
2469 gen_op_store_DT0_fpr(DFPREG(rd));
2470 break;
2471 case 0x6e: /* fdmulq */
2472 CHECK_FPU_FEATURE(dc, FLOAT128);
2473 gen_op_load_fpr_DT0(DFPREG(rs1));
2474 gen_op_load_fpr_DT1(DFPREG(rs2));
2475 gen_clear_float_exceptions();
2476 gen_helper_fdmulq();
2477 gen_helper_check_ieee_exceptions();
2478 gen_op_store_QT0_fpr(QFPREG(rd));
2479 break;
2480 case 0xc4: /* fitos */
2481 gen_clear_float_exceptions();
2482 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2483 gen_helper_check_ieee_exceptions();
2484 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2485 break;
2486 case 0xc6: /* fdtos */
2487 gen_op_load_fpr_DT1(DFPREG(rs2));
2488 gen_clear_float_exceptions();
2489 gen_helper_fdtos(cpu_tmp32);
2490 gen_helper_check_ieee_exceptions();
2491 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2492 break;
2493 case 0xc7: /* fqtos */
2494 CHECK_FPU_FEATURE(dc, FLOAT128);
2495 gen_op_load_fpr_QT1(QFPREG(rs2));
2496 gen_clear_float_exceptions();
2497 gen_helper_fqtos(cpu_tmp32);
2498 gen_helper_check_ieee_exceptions();
2499 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2500 break;
2501 case 0xc8: /* fitod */
2502 gen_helper_fitod(cpu_fpr[rs2]);
2503 gen_op_store_DT0_fpr(DFPREG(rd));
2504 break;
2505 case 0xc9: /* fstod */
2506 gen_helper_fstod(cpu_fpr[rs2]);
2507 gen_op_store_DT0_fpr(DFPREG(rd));
2508 break;
2509 case 0xcb: /* fqtod */
2510 CHECK_FPU_FEATURE(dc, FLOAT128);
2511 gen_op_load_fpr_QT1(QFPREG(rs2));
2512 gen_clear_float_exceptions();
2513 gen_helper_fqtod();
2514 gen_helper_check_ieee_exceptions();
2515 gen_op_store_DT0_fpr(DFPREG(rd));
2516 break;
2517 case 0xcc: /* fitoq */
2518 CHECK_FPU_FEATURE(dc, FLOAT128);
2519 gen_helper_fitoq(cpu_fpr[rs2]);
2520 gen_op_store_QT0_fpr(QFPREG(rd));
2521 break;
2522 case 0xcd: /* fstoq */
2523 CHECK_FPU_FEATURE(dc, FLOAT128);
2524 gen_helper_fstoq(cpu_fpr[rs2]);
2525 gen_op_store_QT0_fpr(QFPREG(rd));
2526 break;
2527 case 0xce: /* fdtoq */
2528 CHECK_FPU_FEATURE(dc, FLOAT128);
2529 gen_op_load_fpr_DT1(DFPREG(rs2));
2530 gen_helper_fdtoq();
2531 gen_op_store_QT0_fpr(QFPREG(rd));
2532 break;
2533 case 0xd1: /* fstoi */
2534 gen_clear_float_exceptions();
2535 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2536 gen_helper_check_ieee_exceptions();
2537 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2538 break;
2539 case 0xd2: /* fdtoi */
2540 gen_op_load_fpr_DT1(DFPREG(rs2));
2541 gen_clear_float_exceptions();
2542 gen_helper_fdtoi(cpu_tmp32);
2543 gen_helper_check_ieee_exceptions();
2544 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2545 break;
2546 case 0xd3: /* fqtoi */
2547 CHECK_FPU_FEATURE(dc, FLOAT128);
2548 gen_op_load_fpr_QT1(QFPREG(rs2));
2549 gen_clear_float_exceptions();
2550 gen_helper_fqtoi(cpu_tmp32);
2551 gen_helper_check_ieee_exceptions();
2552 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2553 break;
2554 #ifdef TARGET_SPARC64
2555 case 0x2: /* V9 fmovd */
2556 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2557 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2558 cpu_fpr[DFPREG(rs2) + 1]);
2559 break;
2560 case 0x3: /* V9 fmovq */
2561 CHECK_FPU_FEATURE(dc, FLOAT128);
2562 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2563 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2564 cpu_fpr[QFPREG(rs2) + 1]);
2565 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2566 cpu_fpr[QFPREG(rs2) + 2]);
2567 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2568 cpu_fpr[QFPREG(rs2) + 3]);
2569 break;
2570 case 0x6: /* V9 fnegd */
2571 gen_op_load_fpr_DT1(DFPREG(rs2));
2572 gen_helper_fnegd();
2573 gen_op_store_DT0_fpr(DFPREG(rd));
2574 break;
2575 case 0x7: /* V9 fnegq */
2576 CHECK_FPU_FEATURE(dc, FLOAT128);
2577 gen_op_load_fpr_QT1(QFPREG(rs2));
2578 gen_helper_fnegq();
2579 gen_op_store_QT0_fpr(QFPREG(rd));
2580 break;
2581 case 0xa: /* V9 fabsd */
2582 gen_op_load_fpr_DT1(DFPREG(rs2));
2583 gen_helper_fabsd();
2584 gen_op_store_DT0_fpr(DFPREG(rd));
2585 break;
2586 case 0xb: /* V9 fabsq */
2587 CHECK_FPU_FEATURE(dc, FLOAT128);
2588 gen_op_load_fpr_QT1(QFPREG(rs2));
2589 gen_helper_fabsq();
2590 gen_op_store_QT0_fpr(QFPREG(rd));
2591 break;
2592 case 0x81: /* V9 fstox */
2593 gen_clear_float_exceptions();
2594 gen_helper_fstox(cpu_fpr[rs2]);
2595 gen_helper_check_ieee_exceptions();
2596 gen_op_store_DT0_fpr(DFPREG(rd));
2597 break;
2598 case 0x82: /* V9 fdtox */
2599 gen_op_load_fpr_DT1(DFPREG(rs2));
2600 gen_clear_float_exceptions();
2601 gen_helper_fdtox();
2602 gen_helper_check_ieee_exceptions();
2603 gen_op_store_DT0_fpr(DFPREG(rd));
2604 break;
2605 case 0x83: /* V9 fqtox */
2606 CHECK_FPU_FEATURE(dc, FLOAT128);
2607 gen_op_load_fpr_QT1(QFPREG(rs2));
2608 gen_clear_float_exceptions();
2609 gen_helper_fqtox();
2610 gen_helper_check_ieee_exceptions();
2611 gen_op_store_DT0_fpr(DFPREG(rd));
2612 break;
2613 case 0x84: /* V9 fxtos */
2614 gen_op_load_fpr_DT1(DFPREG(rs2));
2615 gen_clear_float_exceptions();
2616 gen_helper_fxtos(cpu_tmp32);
2617 gen_helper_check_ieee_exceptions();
2618 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2619 break;
2620 case 0x88: /* V9 fxtod */
2621 gen_op_load_fpr_DT1(DFPREG(rs2));
2622 gen_clear_float_exceptions();
2623 gen_helper_fxtod();
2624 gen_helper_check_ieee_exceptions();
2625 gen_op_store_DT0_fpr(DFPREG(rd));
2626 break;
2627 case 0x8c: /* V9 fxtoq */
2628 CHECK_FPU_FEATURE(dc, FLOAT128);
2629 gen_op_load_fpr_DT1(DFPREG(rs2));
2630 gen_clear_float_exceptions();
2631 gen_helper_fxtoq();
2632 gen_helper_check_ieee_exceptions();
2633 gen_op_store_QT0_fpr(QFPREG(rd));
2634 break;
2635 #endif
2636 default:
2637 goto illegal_insn;
2638 }
2639 } else if (xop == 0x35) { /* FPU compare & conditional moves */
2640 #ifdef TARGET_SPARC64
2641 int cond;
2642 #endif
2643 if (gen_trap_ifnofpu(dc, cpu_cond))
2644 goto jmp_insn;
2645 gen_op_clear_ieee_excp_and_FTT();
2646 rs1 = GET_FIELD(insn, 13, 17);
2647 rs2 = GET_FIELD(insn, 27, 31);
2648 xop = GET_FIELD(insn, 18, 26);
2649 save_state(dc, cpu_cond);
2650 #ifdef TARGET_SPARC64
2651 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2652 int l1;
2654 l1 = gen_new_label();
2655 cond = GET_FIELD_SP(insn, 14, 17);
2656 cpu_src1 = get_src1(insn, cpu_src1);
2657 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2658 0, l1);
2659 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2660 gen_set_label(l1);
2661 break;
2662 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2663 int l1;
2665 l1 = gen_new_label();
2666 cond = GET_FIELD_SP(insn, 14, 17);
2667 cpu_src1 = get_src1(insn, cpu_src1);
2668 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2669 0, l1);
2670 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2671 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2672 gen_set_label(l1);
2673 break;
2674 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2675 int l1;
2677 CHECK_FPU_FEATURE(dc, FLOAT128);
2678 l1 = gen_new_label();
2679 cond = GET_FIELD_SP(insn, 14, 17);
2680 cpu_src1 = get_src1(insn, cpu_src1);
2681 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2682 0, l1);
2683 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2684 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2685 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2686 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2687 gen_set_label(l1);
2688 break;
2689 }
2690 #endif
2691 switch (xop) {
2692 #ifdef TARGET_SPARC64
2693 #define FMOVSCC(fcc) \
2694 { \
2695 TCGv r_cond; \
2696 int l1; \
2698 l1 = gen_new_label(); \
2699 r_cond = tcg_temp_new(); \
2700 cond = GET_FIELD_SP(insn, 14, 17); \
2701 gen_fcond(r_cond, fcc, cond); \
2702 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2703 0, l1); \
2704 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2705 gen_set_label(l1); \
2706 tcg_temp_free(r_cond); \
2707 }
2708 #define FMOVDCC(fcc) \
2709 { \
2710 TCGv r_cond; \
2711 int l1; \
2713 l1 = gen_new_label(); \
2714 r_cond = tcg_temp_new(); \
2715 cond = GET_FIELD_SP(insn, 14, 17); \
2716 gen_fcond(r_cond, fcc, cond); \
2717 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2718 0, l1); \
2719 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2720 cpu_fpr[DFPREG(rs2)]); \
2721 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2722 cpu_fpr[DFPREG(rs2) + 1]); \
2723 gen_set_label(l1); \
2724 tcg_temp_free(r_cond); \
2725 }
2726 #define FMOVQCC(fcc) \
2727 { \
2728 TCGv r_cond; \
2729 int l1; \
2731 l1 = gen_new_label(); \
2732 r_cond = tcg_temp_new(); \
2733 cond = GET_FIELD_SP(insn, 14, 17); \
2734 gen_fcond(r_cond, fcc, cond); \
2735 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2736 0, l1); \
2737 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2738 cpu_fpr[QFPREG(rs2)]); \
2739 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2740 cpu_fpr[QFPREG(rs2) + 1]); \
2741 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2742 cpu_fpr[QFPREG(rs2) + 2]); \
2743 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2744 cpu_fpr[QFPREG(rs2) + 3]); \
2745 gen_set_label(l1); \
2746 tcg_temp_free(r_cond); \
2747 }
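/* The FMOV*CC macros emulate a conditional move as a branch over an
   unconditional copy: the %fcc condition is evaluated into r_cond and
   the move is skipped when it is zero, i.e. roughly

       if (fcond(fcc, cond))
           fpr[rd] = fpr[rs2];

   widened to 2 or 4 single registers for the d/q forms. */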
2748 case 0x001: /* V9 fmovscc %fcc0 */
2749 FMOVSCC(0);
2750 break;
2751 case 0x002: /* V9 fmovdcc %fcc0 */
2752 FMOVDCC(0);
2753 break;
2754 case 0x003: /* V9 fmovqcc %fcc0 */
2755 CHECK_FPU_FEATURE(dc, FLOAT128);
2756 FMOVQCC(0);
2757 break;
2758 case 0x041: /* V9 fmovscc %fcc1 */
2759 FMOVSCC(1);
2760 break;
2761 case 0x042: /* V9 fmovdcc %fcc1 */
2762 FMOVDCC(1);
2763 break;
2764 case 0x043: /* V9 fmovqcc %fcc1 */
2765 CHECK_FPU_FEATURE(dc, FLOAT128);
2766 FMOVQCC(1);
2767 break;
2768 case 0x081: /* V9 fmovscc %fcc2 */
2769 FMOVSCC(2);
2770 break;
2771 case 0x082: /* V9 fmovdcc %fcc2 */
2772 FMOVDCC(2);
2773 break;
2774 case 0x083: /* V9 fmovqcc %fcc2 */
2775 CHECK_FPU_FEATURE(dc, FLOAT128);
2776 FMOVQCC(2);
2777 break;
2778 case 0x0c1: /* V9 fmovscc %fcc3 */
2779 FMOVSCC(3);
2780 break;
2781 case 0x0c2: /* V9 fmovdcc %fcc3 */
2782 FMOVDCC(3);
2783 break;
2784 case 0x0c3: /* V9 fmovqcc %fcc3 */
2785 CHECK_FPU_FEATURE(dc, FLOAT128);
2786 FMOVQCC(3);
2787 break;
2788 #undef FMOVSCC
2789 #undef FMOVDCC
2790 #undef FMOVQCC
2791 #define FMOVSCC(icc) \
2792 { \
2793 TCGv r_cond; \
2794 int l1; \
2796 l1 = gen_new_label(); \
2797 r_cond = tcg_temp_new(); \
2798 cond = GET_FIELD_SP(insn, 14, 17); \
2799 gen_cond(r_cond, icc, cond, dc); \
2800 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2801 0, l1); \
2802 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2803 gen_set_label(l1); \
2804 tcg_temp_free(r_cond); \
2805 }
2806 #define FMOVDCC(icc) \
2807 { \
2808 TCGv r_cond; \
2809 int l1; \
2811 l1 = gen_new_label(); \
2812 r_cond = tcg_temp_new(); \
2813 cond = GET_FIELD_SP(insn, 14, 17); \
2814 gen_cond(r_cond, icc, cond, dc); \
2815 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2816 0, l1); \
2817 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2818 cpu_fpr[DFPREG(rs2)]); \
2819 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2820 cpu_fpr[DFPREG(rs2) + 1]); \
2821 gen_set_label(l1); \
2822 tcg_temp_free(r_cond); \
2823 }
2824 #define FMOVQCC(icc) \
2825 { \
2826 TCGv r_cond; \
2827 int l1; \
2829 l1 = gen_new_label(); \
2830 r_cond = tcg_temp_new(); \
2831 cond = GET_FIELD_SP(insn, 14, 17); \
2832 gen_cond(r_cond, icc, cond, dc); \
2833 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2834 0, l1); \
2835 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2836 cpu_fpr[QFPREG(rs2)]); \
2837 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2838 cpu_fpr[QFPREG(rs2) + 1]); \
2839 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2840 cpu_fpr[QFPREG(rs2) + 2]); \
2841 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2842 cpu_fpr[QFPREG(rs2) + 3]); \
2843 gen_set_label(l1); \
2844 tcg_temp_free(r_cond); \
2845 }
2847 case 0x101: /* V9 fmovscc %icc */
2848 FMOVSCC(0);
2849 break;
2850 case 0x102: /* V9 fmovdcc %icc */
2851 FMOVDCC(0);
break;
2852 case 0x103: /* V9 fmovqcc %icc */
2853 CHECK_FPU_FEATURE(dc, FLOAT128);
2854 FMOVQCC(0);
2855 break;
2856 case 0x181: /* V9 fmovscc %xcc */
2857 FMOVSCC(1);
2858 break;
2859 case 0x182: /* V9 fmovdcc %xcc */
2860 FMOVDCC(1);
2861 break;
2862 case 0x183: /* V9 fmovqcc %xcc */
2863 CHECK_FPU_FEATURE(dc, FLOAT128);
2864 FMOVQCC(1);
2865 break;
2866 #undef FMOVSCC
2867 #undef FMOVDCC
2868 #undef FMOVQCC
2869 #endif
2870 case 0x51: /* fcmps, V9 %fcc */
2871 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2872 break;
2873 case 0x52: /* fcmpd, V9 %fcc */
2874 gen_op_load_fpr_DT0(DFPREG(rs1));
2875 gen_op_load_fpr_DT1(DFPREG(rs2));
2876 gen_op_fcmpd(rd & 3);
2877 break;
2878 case 0x53: /* fcmpq, V9 %fcc */
2879 CHECK_FPU_FEATURE(dc, FLOAT128);
2880 gen_op_load_fpr_QT0(QFPREG(rs1));
2881 gen_op_load_fpr_QT1(QFPREG(rs2));
2882 gen_op_fcmpq(rd & 3);
2883 break;
2884 case 0x55: /* fcmpes, V9 %fcc */
2885 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2886 break;
2887 case 0x56: /* fcmped, V9 %fcc */
2888 gen_op_load_fpr_DT0(DFPREG(rs1));
2889 gen_op_load_fpr_DT1(DFPREG(rs2));
2890 gen_op_fcmped(rd & 3);
2891 break;
2892 case 0x57: /* fcmpeq, V9 %fcc */
2893 CHECK_FPU_FEATURE(dc, FLOAT128);
2894 gen_op_load_fpr_QT0(QFPREG(rs1));
2895 gen_op_load_fpr_QT1(QFPREG(rs2));
2896 gen_op_fcmpeq(rd & 3);
2897 break;
2898 default:
2899 goto illegal_insn;
2900 }
2901 } else if (xop == 0x2) {
2902 // clr/mov shortcut
2904 rs1 = GET_FIELD(insn, 13, 17);
2905 if (rs1 == 0) {
2906 // or %g0, x, y -> mov T0, x; mov y, T0
2907 if (IS_IMM) { /* immediate */
2908 TCGv r_const;
2910 simm = GET_FIELDs(insn, 19, 31);
2911 r_const = tcg_const_tl(simm);
2912 gen_movl_TN_reg(rd, r_const);
2913 tcg_temp_free(r_const);
2914 } else { /* register */
2915 rs2 = GET_FIELD(insn, 27, 31);
2916 gen_movl_reg_TN(rs2, cpu_dst);
2917 gen_movl_TN_reg(rd, cpu_dst);
2918 }
2919 } else {
2920 cpu_src1 = get_src1(insn, cpu_src1);
2921 if (IS_IMM) { /* immediate */
2922 simm = GET_FIELDs(insn, 19, 31);
2923 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2924 gen_movl_TN_reg(rd, cpu_dst);
2925 } else { /* register */
2926 // or x, %g0, y -> mov T1, x; mov y, T1
2927 rs2 = GET_FIELD(insn, 27, 31);
2928 if (rs2 != 0) {
2929 gen_movl_reg_TN(rs2, cpu_src2);
2930 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2931 gen_movl_TN_reg(rd, cpu_dst);
2932 } else
2933 gen_movl_TN_reg(rd, cpu_src1);
2934 }
2935 }
2936 #ifdef TARGET_SPARC64
2937 } else if (xop == 0x25) { /* sll, V9 sllx */
2938 cpu_src1 = get_src1(insn, cpu_src1);
2939 if (IS_IMM) { /* immediate */
2940 simm = GET_FIELDs(insn, 20, 31);
2941 if (insn & (1 << 12)) {
2942 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2943 } else {
2944 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2945 }
2946 } else { /* register */
2947 rs2 = GET_FIELD(insn, 27, 31);
2948 gen_movl_reg_TN(rs2, cpu_src2);
2949 if (insn & (1 << 12)) {
2950 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2951 } else {
2952 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2953 }
2954 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2955 }
2956 gen_movl_TN_reg(rd, cpu_dst);
2957 } else if (xop == 0x26) { /* srl, V9 srlx */
2958 cpu_src1 = get_src1(insn, cpu_src1);
2959 if (IS_IMM) { /* immediate */
2960 simm = GET_FIELDs(insn, 20, 31);
2961 if (insn & (1 << 12)) {
2962 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2963 } else {
2964 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2965 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2966 }
2967 } else { /* register */
2968 rs2 = GET_FIELD(insn, 27, 31);
2969 gen_movl_reg_TN(rs2, cpu_src2);
2970 if (insn & (1 << 12)) {
2971 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2972 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2973 } else {
2974 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2975 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2976 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2977 }
2978 }
2979 gen_movl_TN_reg(rd, cpu_dst);
2980 } else if (xop == 0x27) { /* sra, V9 srax */
2981 cpu_src1 = get_src1(insn, cpu_src1);
2982 if (IS_IMM) { /* immediate */
2983 simm = GET_FIELDs(insn, 20, 31);
2984 if (insn & (1 << 12)) {
2985 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2986 } else {
2987 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2988 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2989 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2990 }
2991 } else { /* register */
2992 rs2 = GET_FIELD(insn, 27, 31);
2993 gen_movl_reg_TN(rs2, cpu_src2);
2994 if (insn & (1 << 12)) {
2995 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2996 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2997 } else {
2998 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2999 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3000 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3001 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3002 }
3003 }
3004 gen_movl_TN_reg(rd, cpu_dst);
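/* For all three shifts, insn bit 12 selects the V9 64-bit form
   (sllx/srlx/srax, 6-bit count) versus the legacy 32-bit form, which
   masks the count to 5 bits and operates on the low word; a sketch of
   the modelled semantics:

       dst = src1 << (cnt & 0x1f);               // sll
       dst = src1 << (cnt & 0x3f);               // sllx
       dst = (uint32_t)src1 >> (cnt & 0x1f);     // srl
       dst = (int32_t)src1 >> (cnt & 0x1f);      // sra
*/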
3005 #endif
3006 } else if (xop < 0x36) {
3007 if (xop < 0x20) {
3008 cpu_src1 = get_src1(insn, cpu_src1);
3009 cpu_src2 = get_src2(insn, cpu_src2);
3010 switch (xop & ~0x10) {
3011 case 0x0: /* add */
3012 if (IS_IMM) {
3013 simm = GET_FIELDs(insn, 19, 31);
3014 if (xop & 0x10) {
3015 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3016 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3017 dc->cc_op = CC_OP_ADD;
3018 } else {
3019 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3020 }
3021 } else {
3022 if (xop & 0x10) {
3023 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3024 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3025 dc->cc_op = CC_OP_ADD;
3026 } else {
3027 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3028 }
3029 }
3030 break;
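/* Condition codes are evaluated lazily: the ...cc forms capture the
   operands/result in cpu_cc_src/cpu_cc_src2/cpu_cc_dst and record
   *how* to derive the flags in cpu_cc_op (mirrored at translate time
   in dc->cc_op); gen_helper_compute_psr() materialises the bits only
   when PSR/CCR is actually read.  The cases below follow the same
   pattern. */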
3031 case 0x1: /* and */
3032 if (IS_IMM) {
3033 simm = GET_FIELDs(insn, 19, 31);
3034 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3035 } else {
3036 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3037 }
3038 if (xop & 0x10) {
3039 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3040 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3041 dc->cc_op = CC_OP_LOGIC;
3042 }
3043 break;
3044 case 0x2: /* or */
3045 if (IS_IMM) {
3046 simm = GET_FIELDs(insn, 19, 31);
3047 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3048 } else {
3049 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3050 }
3051 if (xop & 0x10) {
3052 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3053 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3054 dc->cc_op = CC_OP_LOGIC;
3055 }
3056 break;
3057 case 0x3: /* xor */
3058 if (IS_IMM) {
3059 simm = GET_FIELDs(insn, 19, 31);
3060 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3061 } else {
3062 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3063 }
3064 if (xop & 0x10) {
3065 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3066 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3067 dc->cc_op = CC_OP_LOGIC;
3068 }
3069 break;
3070 case 0x4: /* sub */
3071 if (IS_IMM) {
3072 simm = GET_FIELDs(insn, 19, 31);
3073 if (xop & 0x10) {
3074 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3075 } else {
3076 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3077 }
3078 } else {
3079 if (xop & 0x10) {
3080 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3081 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3082 dc->cc_op = CC_OP_SUB;
3083 } else {
3084 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3085 }
3086 }
3087 break;
3088 case 0x5: /* andn */
3089 if (IS_IMM) {
3090 simm = GET_FIELDs(insn, 19, 31);
3091 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3092 } else {
3093 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3094 }
3095 if (xop & 0x10) {
3096 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3097 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3098 dc->cc_op = CC_OP_LOGIC;
3099 }
3100 break;
3101 case 0x6: /* orn */
3102 if (IS_IMM) {
3103 simm = GET_FIELDs(insn, 19, 31);
3104 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3105 } else {
3106 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3107 }
3108 if (xop & 0x10) {
3109 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3110 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3111 dc->cc_op = CC_OP_LOGIC;
3112 }
3113 break;
3114 case 0x7: /* xorn */
3115 if (IS_IMM) {
3116 simm = GET_FIELDs(insn, 19, 31);
3117 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3118 } else {
3119 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3120 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3121 }
3122 if (xop & 0x10) {
3123 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3124 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3125 dc->cc_op = CC_OP_LOGIC;
3126 }
3127 break;
3128 case 0x8: /* addx, V9 addc */
3129 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3130 (xop & 0x10));
3131 break;
3132 #ifdef TARGET_SPARC64
3133 case 0x9: /* V9 mulx */
3134 if (IS_IMM) {
3135 simm = GET_FIELDs(insn, 19, 31);
3136 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3137 } else {
3138 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3139 }
3140 break;
3141 #endif
3142 case 0xa: /* umul */
3143 CHECK_IU_FEATURE(dc, MUL);
3144 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3145 if (xop & 0x10) {
3146 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3147 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3148 dc->cc_op = CC_OP_LOGIC;
3149 }
3150 break;
3151 case 0xb: /* smul */
3152 CHECK_IU_FEATURE(dc, MUL);
3153 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3154 if (xop & 0x10) {
3155 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3156 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3157 dc->cc_op = CC_OP_LOGIC;
3158 }
3159 break;
3160 case 0xc: /* subx, V9 subc */
3161 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3162 (xop & 0x10));
3163 break;
3164 #ifdef TARGET_SPARC64
3165 case 0xd: /* V9 udivx */
3166 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3167 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3168 gen_trap_ifdivzero_tl(cpu_cc_src2);
3169 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3170 break;
3171 #endif
3172 case 0xe: /* udiv */
3173 CHECK_IU_FEATURE(dc, DIV);
3174 if (xop & 0x10) {
3175 gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3176 dc->cc_op = CC_OP_DIV;
3177 } else {
3178 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3179 }
3180 break;
3181 case 0xf: /* sdiv */
3182 CHECK_IU_FEATURE(dc, DIV);
3183 if (xop & 0x10) {
3184 gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3185 dc->cc_op = CC_OP_DIV;
3186 } else {
3187 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3188 }
3189 break;
3190 default:
3191 goto illegal_insn;
3192 }
3193 gen_movl_TN_reg(rd, cpu_dst);
3194 } else {
3195 cpu_src1 = get_src1(insn, cpu_src1);
3196 cpu_src2 = get_src2(insn, cpu_src2);
3197 switch (xop) {
3198 case 0x20: /* taddcc */
3199 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3200 gen_movl_TN_reg(rd, cpu_dst);
3201 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3202 dc->cc_op = CC_OP_TADD;
3203 break;
3204 case 0x21: /* tsubcc */
3205 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3206 gen_movl_TN_reg(rd, cpu_dst);
3207 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3208 dc->cc_op = CC_OP_TSUB;
3209 break;
3210 case 0x22: /* taddcctv */
3211 save_state(dc, cpu_cond);
3212 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3213 gen_movl_TN_reg(rd, cpu_dst);
3214 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3215 dc->cc_op = CC_OP_TADDTV;
3216 break;
3217 case 0x23: /* tsubcctv */
3218 save_state(dc, cpu_cond);
3219 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3220 gen_movl_TN_reg(rd, cpu_dst);
3221 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3222 dc->cc_op = CC_OP_TSUBTV;
3223 break;
3224 case 0x24: /* mulscc */
3225 gen_helper_compute_psr();
3226 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3227 gen_movl_TN_reg(rd, cpu_dst);
3228 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3229 dc->cc_op = CC_OP_ADD;
3230 break;
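/* mulscc is the one op here that must see up-to-date flags on entry
   (the bit it shifts in is icc.N xor icc.V), hence the explicit
   gen_helper_compute_psr() before the multiply step. */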
3231 #ifndef TARGET_SPARC64
3232 case 0x25: /* sll */
3233 if (IS_IMM) { /* immediate */
3234 simm = GET_FIELDs(insn, 20, 31);
3235 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3236 } else { /* register */
3237 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3238 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3239 }
3240 gen_movl_TN_reg(rd, cpu_dst);
3241 break;
3242 case 0x26: /* srl */
3243 if (IS_IMM) { /* immediate */
3244 simm = GET_FIELDs(insn, 20, 31);
3245 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3246 } else { /* register */
3247 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3248 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3249 }
3250 gen_movl_TN_reg(rd, cpu_dst);
3251 break;
3252 case 0x27: /* sra */
3253 if (IS_IMM) { /* immediate */
3254 simm = GET_FIELDs(insn, 20, 31);
3255 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3256 } else { /* register */
3257 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3258 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3259 }
3260 gen_movl_TN_reg(rd, cpu_dst);
3261 break;
3262 #endif
3263 case 0x30:
3264 {
3265 switch(rd) {
3266 case 0: /* wry */
3267 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3268 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3269 break;
3270 #ifndef TARGET_SPARC64
3271 case 0x01 ... 0x0f: /* undefined in the
3272 SPARCv8 manual, nop
3273 on the microSPARC
3274 II */
3275 case 0x10 ... 0x1f: /* implementation-dependent
3276 in the SPARCv8
3277 manual, nop on the
3278 microSPARC II */
3279 break;
3280 #else
3281 case 0x2: /* V9 wrccr */
3282 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3283 gen_helper_wrccr(cpu_dst);
3284 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3285 dc->cc_op = CC_OP_FLAGS;
3286 break;
3287 case 0x3: /* V9 wrasi */
3288 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3289 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3290 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3291 break;
3292 case 0x6: /* V9 wrfprs */
3293 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3294 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3295 save_state(dc, cpu_cond);
3296 gen_op_next_insn();
3297 tcg_gen_exit_tb(0);
3298 dc->is_br = 1;
3299 break;
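/* Writing FPRS can enable or disable the FPU, a fact baked into the
   TB flags at translation time, so the TB is ended here (state saved,
   pc advanced, exit) instead of being chained. */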
3300 case 0xf: /* V9 sir, nop if user */
3301 #if !defined(CONFIG_USER_ONLY)
3302 if (supervisor(dc)) {
; // XXX
3304 }
3305 #endif
3306 break;
3307 case 0x13: /* Graphics Status */
3308 if (gen_trap_ifnofpu(dc, cpu_cond))
3309 goto jmp_insn;
3310 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3311 break;
3312 case 0x14: /* Softint set */
3313 if (!supervisor(dc))
3314 goto illegal_insn;
3315 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3316 gen_helper_set_softint(cpu_tmp64);
3317 break;
3318 case 0x15: /* Softint clear */
3319 if (!supervisor(dc))
3320 goto illegal_insn;
3321 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3322 gen_helper_clear_softint(cpu_tmp64);
3323 break;
3324 case 0x16: /* Softint write */
3325 if (!supervisor(dc))
3326 goto illegal_insn;
3327 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3328 gen_helper_write_softint(cpu_tmp64);
3329 break;
3330 case 0x17: /* Tick compare */
3331 #if !defined(CONFIG_USER_ONLY)
3332 if (!supervisor(dc))
3333 goto illegal_insn;
3334 #endif
3335 {
3336 TCGv_ptr r_tickptr;
3338 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3339 cpu_src2);
3340 r_tickptr = tcg_temp_new_ptr();
3341 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3342 offsetof(CPUState, tick));
3343 gen_helper_tick_set_limit(r_tickptr,
3344 cpu_tick_cmpr);
3345 tcg_temp_free_ptr(r_tickptr);
3346 }
3347 break;
3348 case 0x18: /* System tick */
3349 #if !defined(CONFIG_USER_ONLY)
3350 if (!supervisor(dc))
3351 goto illegal_insn;
3352 #endif
3353 {
3354 TCGv_ptr r_tickptr;
3356 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3357 cpu_src2);
3358 r_tickptr = tcg_temp_new_ptr();
3359 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3360 offsetof(CPUState, stick));
3361 gen_helper_tick_set_count(r_tickptr,
3362 cpu_dst);
3363 tcg_temp_free_ptr(r_tickptr);
3364 }
3365 break;
3366 case 0x19: /* System tick compare */
3367 #if !defined(CONFIG_USER_ONLY)
3368 if (!supervisor(dc))
3369 goto illegal_insn;
3370 #endif
3371 {
3372 TCGv_ptr r_tickptr;
3374 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3375 cpu_src2);
3376 r_tickptr = tcg_temp_new_ptr();
3377 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3378 offsetof(CPUState, stick));
3379 gen_helper_tick_set_limit(r_tickptr,
3380 cpu_stick_cmpr);
3381 tcg_temp_free_ptr(r_tickptr);
3382 }
3383 break;
3385 case 0x10: /* Performance Control */
3386 case 0x11: /* Performance Instrumentation
3387 Counter */
3388 case 0x12: /* Dispatch Control */
3389 #endif
3390 default:
3391 goto illegal_insn;
3392 }
3393 }
3394 break;
3395 #if !defined(CONFIG_USER_ONLY)
3396 case 0x31: /* wrpsr, V9 saved, restored */
3397 {
3398 if (!supervisor(dc))
3399 goto priv_insn;
3400 #ifdef TARGET_SPARC64
3401 switch (rd) {
3402 case 0:
3403 gen_helper_saved();
3404 break;
3405 case 1:
3406 gen_helper_restored();
3407 break;
3408 case 2: /* UA2005 allclean */
3409 case 3: /* UA2005 otherw */
3410 case 4: /* UA2005 normalw */
3411 case 5: /* UA2005 invalw */
3412 // XXX
3413 default:
3414 goto illegal_insn;
3415 }
3416 #else
3417 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3418 gen_helper_wrpsr(cpu_dst);
3419 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3420 dc->cc_op = CC_OP_FLAGS;
3421 save_state(dc, cpu_cond);
3422 gen_op_next_insn();
3423 tcg_gen_exit_tb(0);
3424 dc->is_br = 1;
3425 #endif
3426 }
3427 break;
3428 case 0x32: /* wrwim, V9 wrpr */
3429 {
3430 if (!supervisor(dc))
3431 goto priv_insn;
3432 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3433 #ifdef TARGET_SPARC64
3434 switch (rd) {
3435 case 0: // tpc
3436 {
3437 TCGv_ptr r_tsptr;
3439 r_tsptr = tcg_temp_new_ptr();
3440 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3441 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3442 offsetof(trap_state, tpc));
3443 tcg_temp_free_ptr(r_tsptr);
3444 }
3445 break;
3446 case 1: // tnpc
3447 {
3448 TCGv_ptr r_tsptr;
3450 r_tsptr = tcg_temp_new_ptr();
3451 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3452 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3453 offsetof(trap_state, tnpc));
3454 tcg_temp_free_ptr(r_tsptr);
3455 }
3456 break;
3457 case 2: // tstate
3458 {
3459 TCGv_ptr r_tsptr;
3461 r_tsptr = tcg_temp_new_ptr();
3462 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3463 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3464 offsetof(trap_state,
3465 tstate));
3466 tcg_temp_free_ptr(r_tsptr);
3467 }
3468 break;
3469 case 3: // tt
3470 {
3471 TCGv_ptr r_tsptr;
3473 r_tsptr = tcg_temp_new_ptr();
3474 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3475 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3476 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3477 offsetof(trap_state, tt));
3478 tcg_temp_free_ptr(r_tsptr);
3479 }
3480 break;
3481 case 4: // tick
3482 {
3483 TCGv_ptr r_tickptr;
3485 r_tickptr = tcg_temp_new_ptr();
3486 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3487 offsetof(CPUState, tick));
3488 gen_helper_tick_set_count(r_tickptr,
3489 cpu_tmp0);
3490 tcg_temp_free_ptr(r_tickptr);
3491 }
3492 break;
3493 case 5: // tba
3494 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3495 break;
3496 case 6: // pstate
3497 save_state(dc, cpu_cond);
3498 gen_helper_wrpstate(cpu_tmp0);
3499 dc->npc = DYNAMIC_PC;
3500 break;
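/* PSTATE (and %tl below) affect privilege, address masking and other
   assumptions cached at translation time, so npc is forced dynamic
   and static chaining stops after this instruction. */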
3501 case 7: // tl
3502 save_state(dc, cpu_cond);
3503 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3504 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3505 offsetof(CPUSPARCState, tl));
3506 dc->npc = DYNAMIC_PC;
3507 break;
3508 case 8: // pil
3509 gen_helper_wrpil(cpu_tmp0);
3510 break;
3511 case 9: // cwp
3512 gen_helper_wrcwp(cpu_tmp0);
3513 break;
3514 case 10: // cansave
3515 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3516 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3517 offsetof(CPUSPARCState,
3518 cansave));
3519 break;
3520 case 11: // canrestore
3521 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3522 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3523 offsetof(CPUSPARCState,
3524 canrestore));
3525 break;
3526 case 12: // cleanwin
3527 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3528 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3529 offsetof(CPUSPARCState,
3530 cleanwin));
3531 break;
3532 case 13: // otherwin
3533 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3534 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3535 offsetof(CPUSPARCState,
3536 otherwin));
3537 break;
3538 case 14: // wstate
3539 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3540 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3541 offsetof(CPUSPARCState,
3542 wstate));
3543 break;
3544 case 16: // UA2005 gl
3545 CHECK_IU_FEATURE(dc, GL);
3546 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3547 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3548 offsetof(CPUSPARCState, gl));
3549 break;
3550 case 26: // UA2005 strand status
3551 CHECK_IU_FEATURE(dc, HYPV);
3552 if (!hypervisor(dc))
3553 goto priv_insn;
3554 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3555 break;
3556 default:
3557 goto illegal_insn;
3558 }
3559 #else
3560 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3561 if (dc->def->nwindows != 32)
3562 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3563 (1 << dc->def->nwindows) - 1);
3564 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3565 #endif
3566 }
3567 break;
3568 case 0x33: /* wrtbr, UA2005 wrhpr */
3569 {
3570 #ifndef TARGET_SPARC64
3571 if (!supervisor(dc))
3572 goto priv_insn;
3573 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3574 #else
3575 CHECK_IU_FEATURE(dc, HYPV);
3576 if (!hypervisor(dc))
3577 goto priv_insn;
3578 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3579 switch (rd) {
3580 case 0: // hpstate
3581 // XXX gen_op_wrhpstate();
3582 save_state(dc, cpu_cond);
3583 gen_op_next_insn();
3584 tcg_gen_exit_tb(0);
3585 dc->is_br = 1;
3586 break;
3587 case 1: // htstate
3588 // XXX gen_op_wrhtstate();
3589 break;
3590 case 3: // hintp
3591 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3592 break;
3593 case 5: // htba
3594 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3595 break;
3596 case 31: // hstick_cmpr
3597 {
3598 TCGv_ptr r_tickptr;
3600 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3601 r_tickptr = tcg_temp_new_ptr();
3602 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3603 offsetof(CPUState, hstick));
3604 gen_helper_tick_set_limit(r_tickptr,
3605 cpu_hstick_cmpr);
3606 tcg_temp_free_ptr(r_tickptr);
3607 }
3608 break;
3609 case 6: // hver readonly
3610 default:
3611 goto illegal_insn;
3612 }
3613 #endif
3614 }
3615 break;
3616 #endif
3617 #ifdef TARGET_SPARC64
3618 case 0x2c: /* V9 movcc */
3619 {
3620 int cc = GET_FIELD_SP(insn, 11, 12);
3621 int cond = GET_FIELD_SP(insn, 14, 17);
3622 TCGv r_cond;
3623 int l1;
3625 r_cond = tcg_temp_new();
3626 if (insn & (1 << 18)) {
3627 if (cc == 0)
3628 gen_cond(r_cond, 0, cond, dc);
3629 else if (cc == 2)
3630 gen_cond(r_cond, 1, cond, dc);
3631 else
3632 goto illegal_insn;
3633 } else {
3634 gen_fcond(r_cond, cc, cond);
3635 }
3637 l1 = gen_new_label();
3639 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3640 if (IS_IMM) { /* immediate */
3641 TCGv r_const;
3643 simm = GET_FIELD_SPs(insn, 0, 10);
3644 r_const = tcg_const_tl(simm);
3645 gen_movl_TN_reg(rd, r_const);
3646 tcg_temp_free(r_const);
3647 } else {
3648 rs2 = GET_FIELD_SP(insn, 0, 4);
3649 gen_movl_reg_TN(rs2, cpu_tmp0);
3650 gen_movl_TN_reg(rd, cpu_tmp0);
3651 }
3652 gen_set_label(l1);
3653 tcg_temp_free(r_cond);
3654 break;
3655 }
3656 case 0x2d: /* V9 sdivx */
3657 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3658 gen_movl_TN_reg(rd, cpu_dst);
3659 break;
3660 case 0x2e: /* V9 popc */
3661 {
3662 cpu_src2 = get_src2(insn, cpu_src2);
3663 gen_helper_popc(cpu_dst, cpu_src2);
3664 gen_movl_TN_reg(rd, cpu_dst);
3665 }
break;
3666 case 0x2f: /* V9 movr */
3667 {
3668 int cond = GET_FIELD_SP(insn, 10, 12);
3669 int l1;
3671 cpu_src1 = get_src1(insn, cpu_src1);
3673 l1 = gen_new_label();
3675 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3676 cpu_src1, 0, l1);
3677 if (IS_IMM) { /* immediate */
3678 TCGv r_const;
3680 simm = GET_FIELD_SPs(insn, 0, 9);
3681 r_const = tcg_const_tl(simm);
3682 gen_movl_TN_reg(rd, r_const);
3683 tcg_temp_free(r_const);
3684 } else {
3685 rs2 = GET_FIELD_SP(insn, 0, 4);
3686 gen_movl_reg_TN(rs2, cpu_tmp0);
3687 gen_movl_TN_reg(rd, cpu_tmp0);
3688 }
3689 gen_set_label(l1);
3690 break;
3691 }
3692 #endif
3693 default:
3694 goto illegal_insn;
3695 }
3697 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3698 #ifdef TARGET_SPARC64
3699 int opf = GET_FIELD_SP(insn, 5, 13);
3700 rs1 = GET_FIELD(insn, 13, 17);
3701 rs2 = GET_FIELD(insn, 27, 31);
3702 if (gen_trap_ifnofpu(dc, cpu_cond))
3703 goto jmp_insn;
3705 switch (opf) {
3706 case 0x000: /* VIS I edge8cc */
3707 case 0x001: /* VIS II edge8n */
3708 case 0x002: /* VIS I edge8lcc */
3709 case 0x003: /* VIS II edge8ln */
3710 case 0x004: /* VIS I edge16cc */
3711 case 0x005: /* VIS II edge16n */
3712 case 0x006: /* VIS I edge16lcc */
3713 case 0x007: /* VIS II edge16ln */
3714 case 0x008: /* VIS I edge32cc */
3715 case 0x009: /* VIS II edge32n */
3716 case 0x00a: /* VIS I edge32lcc */
3717 case 0x00b: /* VIS II edge32ln */
3718 // XXX
3719 goto illegal_insn;
3720 case 0x010: /* VIS I array8 */
3721 CHECK_FPU_FEATURE(dc, VIS1);
3722 cpu_src1 = get_src1(insn, cpu_src1);
3723 gen_movl_reg_TN(rs2, cpu_src2);
3724 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3725 gen_movl_TN_reg(rd, cpu_dst);
3726 break;
3727 case 0x012: /* VIS I array16 */
3728 CHECK_FPU_FEATURE(dc, VIS1);
3729 cpu_src1 = get_src1(insn, cpu_src1);
3730 gen_movl_reg_TN(rs2, cpu_src2);
3731 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3732 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3733 gen_movl_TN_reg(rd, cpu_dst);
3734 break;
3735 case 0x014: /* VIS I array32 */
3736 CHECK_FPU_FEATURE(dc, VIS1);
3737 cpu_src1 = get_src1(insn, cpu_src1);
3738 gen_movl_reg_TN(rs2, cpu_src2);
3739 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3740 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3741 gen_movl_TN_reg(rd, cpu_dst);
3742 break;
3743 case 0x018: /* VIS I alignaddr */
3744 CHECK_FPU_FEATURE(dc, VIS1);
3745 cpu_src1 = get_src1(insn, cpu_src1);
3746 gen_movl_reg_TN(rs2, cpu_src2);
3747 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3748 gen_movl_TN_reg(rd, cpu_dst);
3749 break;
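/* alignaddr latches the low three bits of (rs1 + rs2) in %gsr and
   writes back the 8-byte-aligned address; faligndata (further down)
   then extracts 8 bytes from the 16-byte concatenation of its
   operands at that offset -- the classic VIS unaligned-access idiom:

       alignaddr %o0, %g0, %o1
       ldd      [%o1], %f0
       ldd      [%o1 + 8], %f2
       faligndata %f0, %f2, %f4   ! 8 unaligned bytes from %o0
*/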
3750 case 0x019: /* VIS II bmask */
3751 case 0x01a: /* VIS I alignaddrl */
3752 // XXX
3753 goto illegal_insn;
3754 case 0x020: /* VIS I fcmple16 */
3755 CHECK_FPU_FEATURE(dc, VIS1);
3756 gen_op_load_fpr_DT0(DFPREG(rs1));
3757 gen_op_load_fpr_DT1(DFPREG(rs2));
3758 gen_helper_fcmple16();
3759 gen_op_store_DT0_fpr(DFPREG(rd));
3760 break;
3761 case 0x022: /* VIS I fcmpne16 */
3762 CHECK_FPU_FEATURE(dc, VIS1);
3763 gen_op_load_fpr_DT0(DFPREG(rs1));
3764 gen_op_load_fpr_DT1(DFPREG(rs2));
3765 gen_helper_fcmpne16();
3766 gen_op_store_DT0_fpr(DFPREG(rd));
3767 break;
3768 case 0x024: /* VIS I fcmple32 */
3769 CHECK_FPU_FEATURE(dc, VIS1);
3770 gen_op_load_fpr_DT0(DFPREG(rs1));
3771 gen_op_load_fpr_DT1(DFPREG(rs2));
3772 gen_helper_fcmple32();
3773 gen_op_store_DT0_fpr(DFPREG(rd));
3774 break;
3775 case 0x026: /* VIS I fcmpne32 */
3776 CHECK_FPU_FEATURE(dc, VIS1);
3777 gen_op_load_fpr_DT0(DFPREG(rs1));
3778 gen_op_load_fpr_DT1(DFPREG(rs2));
3779 gen_helper_fcmpne32();
3780 gen_op_store_DT0_fpr(DFPREG(rd));
3781 break;
3782 case 0x028: /* VIS I fcmpgt16 */
3783 CHECK_FPU_FEATURE(dc, VIS1);
3784 gen_op_load_fpr_DT0(DFPREG(rs1));
3785 gen_op_load_fpr_DT1(DFPREG(rs2));
3786 gen_helper_fcmpgt16();
3787 gen_op_store_DT0_fpr(DFPREG(rd));
3788 break;
3789 case 0x02a: /* VIS I fcmpeq16 */
3790 CHECK_FPU_FEATURE(dc, VIS1);
3791 gen_op_load_fpr_DT0(DFPREG(rs1));
3792 gen_op_load_fpr_DT1(DFPREG(rs2));
3793 gen_helper_fcmpeq16();
3794 gen_op_store_DT0_fpr(DFPREG(rd));
3795 break;
3796 case 0x02c: /* VIS I fcmpgt32 */
3797 CHECK_FPU_FEATURE(dc, VIS1);
3798 gen_op_load_fpr_DT0(DFPREG(rs1));
3799 gen_op_load_fpr_DT1(DFPREG(rs2));
3800 gen_helper_fcmpgt32();
3801 gen_op_store_DT0_fpr(DFPREG(rd));
3802 break;
3803 case 0x02e: /* VIS I fcmpeq32 */
3804 CHECK_FPU_FEATURE(dc, VIS1);
3805 gen_op_load_fpr_DT0(DFPREG(rs1));
3806 gen_op_load_fpr_DT1(DFPREG(rs2));
3807 gen_helper_fcmpeq32();
3808 gen_op_store_DT0_fpr(DFPREG(rd));
3809 break;
3810 case 0x031: /* VIS I fmul8x16 */
3811 CHECK_FPU_FEATURE(dc, VIS1);
3812 gen_op_load_fpr_DT0(DFPREG(rs1));
3813 gen_op_load_fpr_DT1(DFPREG(rs2));
3814 gen_helper_fmul8x16();
3815 gen_op_store_DT0_fpr(DFPREG(rd));
3816 break;
3817 case 0x033: /* VIS I fmul8x16au */
3818 CHECK_FPU_FEATURE(dc, VIS1);
3819 gen_op_load_fpr_DT0(DFPREG(rs1));
3820 gen_op_load_fpr_DT1(DFPREG(rs2));
3821 gen_helper_fmul8x16au();
3822 gen_op_store_DT0_fpr(DFPREG(rd));
3823 break;
3824 case 0x035: /* VIS I fmul8x16al */
3825 CHECK_FPU_FEATURE(dc, VIS1);
3826 gen_op_load_fpr_DT0(DFPREG(rs1));
3827 gen_op_load_fpr_DT1(DFPREG(rs2));
3828 gen_helper_fmul8x16al();
3829 gen_op_store_DT0_fpr(DFPREG(rd));
3830 break;
3831 case 0x036: /* VIS I fmul8sux16 */
3832 CHECK_FPU_FEATURE(dc, VIS1);
3833 gen_op_load_fpr_DT0(DFPREG(rs1));
3834 gen_op_load_fpr_DT1(DFPREG(rs2));
3835 gen_helper_fmul8sux16();
3836 gen_op_store_DT0_fpr(DFPREG(rd));
3837 break;
3838 case 0x037: /* VIS I fmul8ulx16 */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 gen_op_load_fpr_DT0(DFPREG(rs1));
3841 gen_op_load_fpr_DT1(DFPREG(rs2));
3842 gen_helper_fmul8ulx16();
3843 gen_op_store_DT0_fpr(DFPREG(rd));
3844 break;
3845 case 0x038: /* VIS I fmuld8sux16 */
3846 CHECK_FPU_FEATURE(dc, VIS1);
3847 gen_op_load_fpr_DT0(DFPREG(rs1));
3848 gen_op_load_fpr_DT1(DFPREG(rs2));
3849 gen_helper_fmuld8sux16();
3850 gen_op_store_DT0_fpr(DFPREG(rd));
3851 break;
3852 case 0x039: /* VIS I fmuld8ulx16 */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 gen_op_load_fpr_DT0(DFPREG(rs1));
3855 gen_op_load_fpr_DT1(DFPREG(rs2));
3856 gen_helper_fmuld8ulx16();
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3858 break;
3859 case 0x03a: /* VIS I fpack32 */
3860 case 0x03b: /* VIS I fpack16 */
3861 case 0x03d: /* VIS I fpackfix */
3862 case 0x03e: /* VIS I pdist */
3863 // XXX
3864 goto illegal_insn;
3865 case 0x048: /* VIS I faligndata */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 gen_op_load_fpr_DT0(DFPREG(rs1));
3868 gen_op_load_fpr_DT1(DFPREG(rs2));
3869 gen_helper_faligndata();
3870 gen_op_store_DT0_fpr(DFPREG(rd));
3871 break;
3872 case 0x04b: /* VIS I fpmerge */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 gen_op_load_fpr_DT0(DFPREG(rs1));
3875 gen_op_load_fpr_DT1(DFPREG(rs2));
3876 gen_helper_fpmerge();
3877 gen_op_store_DT0_fpr(DFPREG(rd));
3878 break;
3879 case 0x04c: /* VIS II bshuffle */
3880 // XXX
3881 goto illegal_insn;
3882 case 0x04d: /* VIS I fexpand */
3883 CHECK_FPU_FEATURE(dc, VIS1);
3884 gen_op_load_fpr_DT0(DFPREG(rs1));
3885 gen_op_load_fpr_DT1(DFPREG(rs2));
3886 gen_helper_fexpand();
3887 gen_op_store_DT0_fpr(DFPREG(rd));
3888 break;
3889 case 0x050: /* VIS I fpadd16 */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 gen_op_load_fpr_DT0(DFPREG(rs1));
3892 gen_op_load_fpr_DT1(DFPREG(rs2));
3893 gen_helper_fpadd16();
3894 gen_op_store_DT0_fpr(DFPREG(rd));
3895 break;
3896 case 0x051: /* VIS I fpadd16s */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_helper_fpadd16s(cpu_fpr[rd],
3899 cpu_fpr[rs1], cpu_fpr[rs2]);
3900 break;
3901 case 0x052: /* VIS I fpadd32 */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 gen_op_load_fpr_DT0(DFPREG(rs1));
3904 gen_op_load_fpr_DT1(DFPREG(rs2));
3905 gen_helper_fpadd32();
3906 gen_op_store_DT0_fpr(DFPREG(rd));
3907 break;
3908 case 0x053: /* VIS I fpadd32s */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 gen_helper_fpadd32s(cpu_fpr[rd],
3911 cpu_fpr[rs1], cpu_fpr[rs2]);
3912 break;
3913 case 0x054: /* VIS I fpsub16 */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 gen_op_load_fpr_DT0(DFPREG(rs1));
3916 gen_op_load_fpr_DT1(DFPREG(rs2));
3917 gen_helper_fpsub16();
3918 gen_op_store_DT0_fpr(DFPREG(rd));
3919 break;
3920 case 0x055: /* VIS I fpsub16s */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 gen_helper_fpsub16s(cpu_fpr[rd],
3923 cpu_fpr[rs1], cpu_fpr[rs2]);
3924 break;
3925 case 0x056: /* VIS I fpsub32 */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 gen_op_load_fpr_DT0(DFPREG(rs1));
3928 gen_op_load_fpr_DT1(DFPREG(rs2));
3929 gen_helper_fpsub32();
3930 gen_op_store_DT0_fpr(DFPREG(rd));
3931 break;
3932 case 0x057: /* VIS I fpsub32s */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 gen_helper_fpsub32s(cpu_fpr[rd],
3935 cpu_fpr[rs1], cpu_fpr[rs2]);
3936 break;
3937 case 0x060: /* VIS I fzero */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3940 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3941 break;
3942 case 0x061: /* VIS I fzeros */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3945 break;
3946 case 0x062: /* VIS I fnor */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3949 cpu_fpr[DFPREG(rs2)]);
3950 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
3951 cpu_fpr[DFPREG(rs2) + 1]);
3952 break;
3953 case 0x063: /* VIS I fnors */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3956 break;
3957 case 0x064: /* VIS I fandnot2 */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3960 cpu_fpr[DFPREG(rs2)]);
3961 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3962 cpu_fpr[DFPREG(rs1) + 1],
3963 cpu_fpr[DFPREG(rs2) + 1]);
3964 break;
3965 case 0x065: /* VIS I fandnot2s */
3966 CHECK_FPU_FEATURE(dc, VIS1);
3967 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3968 break;
3969 case 0x066: /* VIS I fnot2 */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3972 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3973 cpu_fpr[DFPREG(rs2) + 1]);
3974 break;
3975 case 0x067: /* VIS I fnot2s */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3978 break;
3979 case 0x068: /* VIS I fandnot1 */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3982 cpu_fpr[DFPREG(rs1)]);
3983 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3984 cpu_fpr[DFPREG(rs2) + 1],
3985 cpu_fpr[DFPREG(rs1) + 1]);
3986 break;
3987 case 0x069: /* VIS I fandnot1s */
3988 CHECK_FPU_FEATURE(dc, VIS1);
3989 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3990 break;
3991 case 0x06a: /* VIS I fnot1 */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3994 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3995 cpu_fpr[DFPREG(rs1) + 1]);
3996 break;
3997 case 0x06b: /* VIS I fnot1s */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4000 break;
4001 case 0x06c: /* VIS I fxor */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4004 cpu_fpr[DFPREG(rs2)]);
4005 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4006 cpu_fpr[DFPREG(rs1) + 1],
4007 cpu_fpr[DFPREG(rs2) + 1]);
4008 break;
4009 case 0x06d: /* VIS I fxors */
4010 CHECK_FPU_FEATURE(dc, VIS1);
4011 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4012 break;
4013 case 0x06e: /* VIS I fnand */
4014 CHECK_FPU_FEATURE(dc, VIS1);
4015 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4016 cpu_fpr[DFPREG(rs2)]);
4017 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4018 cpu_fpr[DFPREG(rs2) + 1]);
4019 break;
4020 case 0x06f: /* VIS I fnands */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4023 break;
4024 case 0x070: /* VIS I fand */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4027 cpu_fpr[DFPREG(rs2)]);
4028 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4029 cpu_fpr[DFPREG(rs1) + 1],
4030 cpu_fpr[DFPREG(rs2) + 1]);
4031 break;
4032 case 0x071: /* VIS I fands */
4033 CHECK_FPU_FEATURE(dc, VIS1);
4034 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4035 break;
4036 case 0x072: /* VIS I fxnor */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4039 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4040 cpu_fpr[DFPREG(rs1)]);
4041 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4042 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4043 cpu_fpr[DFPREG(rs1) + 1]);
4044 break;
4045 case 0x073: /* VIS I fxnors */
4046 CHECK_FPU_FEATURE(dc, VIS1);
4047 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4048 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4049 break;
4050 case 0x074: /* VIS I fsrc1 */
4051 CHECK_FPU_FEATURE(dc, VIS1);
4052 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4053 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4054 cpu_fpr[DFPREG(rs1) + 1]);
4055 break;
4056 case 0x075: /* VIS I fsrc1s */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4059 break;
4060 case 0x076: /* VIS I fornot2 */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4063 cpu_fpr[DFPREG(rs2)]);
4064 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4065 cpu_fpr[DFPREG(rs1) + 1],
4066 cpu_fpr[DFPREG(rs2) + 1]);
4067 break;
4068 case 0x077: /* VIS I fornot2s */
4069 CHECK_FPU_FEATURE(dc, VIS1);
4070 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4071 break;
4072 case 0x078: /* VIS I fsrc2 */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 gen_op_load_fpr_DT0(DFPREG(rs2));
4075 gen_op_store_DT0_fpr(DFPREG(rd));
4076 break;
4077 case 0x079: /* VIS I fsrc2s */
4078 CHECK_FPU_FEATURE(dc, VIS1);
4079 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4080 break;
4081 case 0x07a: /* VIS I fornot1 */
4082 CHECK_FPU_FEATURE(dc, VIS1);
4083 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4084 cpu_fpr[DFPREG(rs1)]);
4085 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4086 cpu_fpr[DFPREG(rs2) + 1],
4087 cpu_fpr[DFPREG(rs1) + 1]);
4088 break;
4089 case 0x07b: /* VIS I fornot1s */
4090 CHECK_FPU_FEATURE(dc, VIS1);
4091 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4092 break;
4093 case 0x07c: /* VIS I for */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4096 cpu_fpr[DFPREG(rs2)]);
4097 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4098 cpu_fpr[DFPREG(rs1) + 1],
4099 cpu_fpr[DFPREG(rs2) + 1]);
4100 break;
4101 case 0x07d: /* VIS I fors */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4104 break;
4105 case 0x07e: /* VIS I fone */
4106 CHECK_FPU_FEATURE(dc, VIS1);
4107 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4108 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4109 break;
4110 case 0x07f: /* VIS I fones */
4111 CHECK_FPU_FEATURE(dc, VIS1);
4112 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4113 break;
4114 case 0x080: /* VIS I shutdown */
4115 case 0x081: /* VIS II siam */
4116 // XXX
4117 goto illegal_insn;
4118 default:
4119 goto illegal_insn;
4120 }
4121 #else
4122 goto ncp_insn;
4123 #endif
4124 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4125 #ifdef TARGET_SPARC64
4126 goto illegal_insn;
4127 #else
4128 goto ncp_insn;
4129 #endif
4130 #ifdef TARGET_SPARC64
4131 } else if (xop == 0x39) { /* V9 return */
4132 TCGv_i32 r_const;
4134 save_state(dc, cpu_cond);
4135 cpu_src1 = get_src1(insn, cpu_src1);
4136 if (IS_IMM) { /* immediate */
4137 simm = GET_FIELDs(insn, 19, 31);
4138 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4139 } else { /* register */
4140 rs2 = GET_FIELD(insn, 27, 31);
4141 if (rs2) {
4142 gen_movl_reg_TN(rs2, cpu_src2);
4143 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4144 } else
4145 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4146 }
4147 gen_helper_restore();
4148 gen_mov_pc_npc(dc, cpu_cond);
4149 r_const = tcg_const_i32(3);
4150 gen_helper_check_align(cpu_dst, r_const);
4151 tcg_temp_free_i32(r_const);
4152 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4153 dc->npc = DYNAMIC_PC;
4154 goto jmp_insn;
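/* V9 return = restore the register window, then transfer control:
   the computed target goes to npc after gen_helper_check_align()
   verifies the low two bits (mask 3), raising an alignment trap
   otherwise; translation stops since npc is now dynamic. */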
4155 #endif
4156 } else {
4157 cpu_src1 = get_src1(insn, cpu_src1);
4158 if (IS_IMM) { /* immediate */
4159 simm = GET_FIELDs(insn, 19, 31);
4160 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4161 } else { /* register */
4162 rs2 = GET_FIELD(insn, 27, 31);
4163 if (rs2) {
4164 gen_movl_reg_TN(rs2, cpu_src2);
4165 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4166 } else
4167 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4168 }
4169 switch (xop) {
4170 case 0x38: /* jmpl */
4171 {
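/* jmpl writes the address of this instruction to rd (the link
   register), then jumps through the delay slot: pc <- npc,
   npc <- computed target; e.g. jmpl %i7 + 8, %g0 is ret */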
4172 TCGv r_pc;
4173 TCGv_i32 r_const;
4175 r_pc = tcg_const_tl(dc->pc);
4176 gen_movl_TN_reg(rd, r_pc);
4177 tcg_temp_free(r_pc);
4178 gen_mov_pc_npc(dc, cpu_cond);
4179 r_const = tcg_const_i32(3);
4180 gen_helper_check_align(cpu_dst, r_const);
4181 tcg_temp_free_i32(r_const);
4182 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4183 dc->npc = DYNAMIC_PC;
4184 }
4185 goto jmp_insn;
4186 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4187 case 0x39: /* rett, V9 return */
4188 {
4189 TCGv_i32 r_const;
4191 if (!supervisor(dc))
4192 goto priv_insn;
4193 gen_mov_pc_npc(dc, cpu_cond);
4194 r_const = tcg_const_i32(3);
4195 gen_helper_check_align(cpu_dst, r_const);
4196 tcg_temp_free_i32(r_const);
4197 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4198 dc->npc = DYNAMIC_PC;
4199 gen_helper_rett();
4200 }
4201 goto jmp_insn;
4202 #endif
4203 case 0x3b: /* flush */
4204 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4205 goto unimp_flush;
4206 gen_helper_flush(cpu_dst);
4207 break;
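/* save/restore rotate the register window via helpers that also
   raise window overflow/underflow traps; the sum already computed
   into cpu_dst before this switch is written to rd in the new window */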
4208 case 0x3c: /* save */
4209 save_state(dc, cpu_cond);
4210 gen_helper_save();
4211 gen_movl_TN_reg(rd, cpu_dst);
4212 break;
4213 case 0x3d: /* restore */
4214 save_state(dc, cpu_cond);
4215 gen_helper_restore();
4216 gen_movl_TN_reg(rd, cpu_dst);
4217 break;
4218 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4219 case 0x3e: /* V9 done/retry */
4220 {
4221 switch (rd) {
4222 case 0:
4223 if (!supervisor(dc))
4224 goto priv_insn;
4225 dc->npc = DYNAMIC_PC;
4226 dc->pc = DYNAMIC_PC;
4227 gen_helper_done();
4228 goto jmp_insn;
4229 case 1:
4230 if (!supervisor(dc))
4231 goto priv_insn;
4232 dc->npc = DYNAMIC_PC;
4233 dc->pc = DYNAMIC_PC;
4234 gen_helper_retry();
4235 goto jmp_insn;
4236 default:
4237 goto illegal_insn;
4238 }
4239 }
4240 break;
4241 #endif
4242 default:
4243 goto illegal_insn;
4244 }
4245 }
4246 break;
4247 }
4248 break;
4249 case 3: /* load/store instructions */
4250 {
4251 unsigned int xop = GET_FIELD(insn, 7, 12);
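/* xop is the op3 field, insn bits 24:19 (GET_FIELD counts from the
   MSB); e.g. ldub encodes as xop == 0x01, stx as xop == 0x0e */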
4253 /* flush pending conditional evaluations before exposing
4254 cpu state */
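/* condition codes are evaluated lazily: arithmetic ops only record
   their operands and set cc_op, so the PSR flags must be made
   concrete before anything that may fault or read them */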
4255 if (dc->cc_op != CC_OP_FLAGS) {
4256 dc->cc_op = CC_OP_FLAGS;
4257 gen_helper_compute_psr();
4258 }
4259 cpu_src1 = get_src1(insn, cpu_src1);
4260 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4261 rs2 = GET_FIELD(insn, 27, 31);
4262 gen_movl_reg_TN(rs2, cpu_src2);
4263 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4264 } else if (IS_IMM) { /* immediate */
4265 simm = GET_FIELDs(insn, 19, 31);
4266 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4267 } else { /* register */
4268 rs2 = GET_FIELD(insn, 27, 31);
4269 if (rs2 != 0) {
4270 gen_movl_reg_TN(rs2, cpu_src2);
4271 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4272 } else
4273 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4274 }
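/* cpu_addr now holds the effective address: rs1 + simm13 for the
   immediate form, rs1 + rs2 for the register form, e.g.
   ld [%l0 + 4], %l1 uses %l0 + 4 */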
4275 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4276 (xop > 0x17 && xop <= 0x1d ) ||
4277 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4278 switch (xop) {
4279 case 0x0: /* ld, V9 lduw, load unsigned word */
4280 gen_address_mask(dc, cpu_addr);
4281 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4282 break;
4283 case 0x1: /* ldub, load unsigned byte */
4284 gen_address_mask(dc, cpu_addr);
4285 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4286 break;
4287 case 0x2: /* lduh, load unsigned halfword */
4288 gen_address_mask(dc, cpu_addr);
4289 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4290 break;
4291 case 0x3: /* ldd, load double word */
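/* ldd fills an even/odd register pair: bits 63:32 of the loaded
   value go to rd and bits 31:0 to rd + 1, so rd must be even */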
4292 if (rd & 1)
4293 goto illegal_insn;
4294 else {
4295 TCGv_i32 r_const;
4297 save_state(dc, cpu_cond);
4298 r_const = tcg_const_i32(7);
4299 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4300 tcg_temp_free_i32(r_const);
4301 gen_address_mask(dc, cpu_addr);
4302 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4303 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4304 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4305 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4306 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4307 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4308 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4309 }
4310 break;
4311 case 0x9: /* ldsb, load signed byte */
4312 gen_address_mask(dc, cpu_addr);
4313 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4314 break;
4315 case 0xa: /* ldsh, load signed halfword */
4316 gen_address_mask(dc, cpu_addr);
4317 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4318 break;
4319 case 0xd: /* ldstub -- XXX: should be atomic */
4320 {
4321 TCGv r_const;
4323 gen_address_mask(dc, cpu_addr);
4324 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx); /* ldstub loads unsigned */
4325 r_const = tcg_const_tl(0xff);
4326 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4327 tcg_temp_free(r_const);
4328 }
4329 break;
4330 case 0x0f: /* swap, swap register with memory. XXX: should be
4331 atomic, like ldstub */
4332 CHECK_IU_FEATURE(dc, SWAP);
4333 gen_movl_reg_TN(rd, cpu_val);
4334 gen_address_mask(dc, cpu_addr);
4335 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4336 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4337 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4338 break;
4339 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4340 case 0x10: /* lda, V9 lduwa, load word alternate */
4341 #ifndef TARGET_SPARC64
4342 if (IS_IMM)
4343 goto illegal_insn;
4344 if (!supervisor(dc))
4345 goto priv_insn;
4346 #endif
4347 save_state(dc, cpu_cond);
4348 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4349 break;
4350 case 0x11: /* lduba, load unsigned byte alternate */
4351 #ifndef TARGET_SPARC64
4352 if (IS_IMM)
4353 goto illegal_insn;
4354 if (!supervisor(dc))
4355 goto priv_insn;
4356 #endif
4357 save_state(dc, cpu_cond);
4358 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4359 break;
4360 case 0x12: /* lduha, load unsigned halfword alternate */
4361 #ifndef TARGET_SPARC64
4362 if (IS_IMM)
4363 goto illegal_insn;
4364 if (!supervisor(dc))
4365 goto priv_insn;
4366 #endif
4367 save_state(dc, cpu_cond);
4368 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4369 break;
4370 case 0x13: /* ldda, load double word alternate */
4371 #ifndef TARGET_SPARC64
4372 if (IS_IMM)
4373 goto illegal_insn;
4374 if (!supervisor(dc))
4375 goto priv_insn;
4376 #endif
4377 if (rd & 1)
4378 goto illegal_insn;
4379 save_state(dc, cpu_cond);
4380 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4381 goto skip_move;
4382 case 0x19: /* ldsba, load signed byte alternate */
4383 #ifndef TARGET_SPARC64
4384 if (IS_IMM)
4385 goto illegal_insn;
4386 if (!supervisor(dc))
4387 goto priv_insn;
4388 #endif
4389 save_state(dc, cpu_cond);
4390 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4391 break;
4392 case 0x1a: /* ldsha, load signed halfword alternate */
4393 #ifndef TARGET_SPARC64
4394 if (IS_IMM)
4395 goto illegal_insn;
4396 if (!supervisor(dc))
4397 goto priv_insn;
4398 #endif
4399 save_state(dc, cpu_cond);
4400 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4401 break;
4402 case 0x1d: /* ldstuba -- XXX: should be atomic */
4403 #ifndef TARGET_SPARC64
4404 if (IS_IMM)
4405 goto illegal_insn;
4406 if (!supervisor(dc))
4407 goto priv_insn;
4408 #endif
4409 save_state(dc, cpu_cond);
4410 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4411 break;
4412 case 0x1f: /* swapa, swap reg with alt. memory. XXX: should be
4413 atomic, like ldstub */
4414 CHECK_IU_FEATURE(dc, SWAP);
4415 #ifndef TARGET_SPARC64
4416 if (IS_IMM)
4417 goto illegal_insn;
4418 if (!supervisor(dc))
4419 goto priv_insn;
4420 #endif
4421 save_state(dc, cpu_cond);
4422 gen_movl_reg_TN(rd, cpu_val);
4423 gen_swap_asi(cpu_val, cpu_addr, insn);
4424 break;
4426 #ifndef TARGET_SPARC64
4427 case 0x30: /* ldc */
4428 case 0x31: /* ldcsr */
4429 case 0x33: /* lddc */
4430 goto ncp_insn;
4431 #endif
4432 #endif
4433 #ifdef TARGET_SPARC64
4434 case 0x08: /* V9 ldsw */
4435 gen_address_mask(dc, cpu_addr);
4436 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4437 break;
4438 case 0x0b: /* V9 ldx */
4439 gen_address_mask(dc, cpu_addr);
4440 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4441 break;
4442 case 0x18: /* V9 ldswa */
4443 save_state(dc, cpu_cond);
4444 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4445 break;
4446 case 0x1b: /* V9 ldxa */
4447 save_state(dc, cpu_cond);
4448 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4449 break;
4450 case 0x2d: /* V9 prefetch, no effect */
4451 goto skip_move;
4452 case 0x30: /* V9 ldfa */
4453 save_state(dc, cpu_cond);
4454 gen_ldf_asi(cpu_addr, insn, 4, rd);
4455 goto skip_move;
4456 case 0x33: /* V9 lddfa */
4457 save_state(dc, cpu_cond);
4458 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4459 goto skip_move;
4460 case 0x3d: /* V9 prefetcha, no effect */
4461 goto skip_move;
4462 case 0x32: /* V9 ldqfa */
4463 CHECK_FPU_FEATURE(dc, FLOAT128);
4464 save_state(dc, cpu_cond);
4465 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4466 goto skip_move;
4467 #endif
4468 default:
4469 goto illegal_insn;
4470 }
4471 gen_movl_TN_reg(rd, cpu_val);
4472 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4473 skip_move: ;
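/* loads that have already written their destination (ldda) or have
   none (prefetch, FP loads via ASI) jump to skip_move to bypass the
   integer write-back above */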
4474 #endif
4475 } else if (xop >= 0x20 && xop < 0x24) {
4476 if (gen_trap_ifnofpu(dc, cpu_cond))
4477 goto jmp_insn;
4478 save_state(dc, cpu_cond);
4479 switch (xop) {
4480 case 0x20: /* ldf, load fpreg */
4481 gen_address_mask(dc, cpu_addr);
4482 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4483 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4484 break;
4485 case 0x21: /* ldfsr, V9 ldxfsr */
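/* on V9, rd == 1 selects the 64-bit ldxfsr form; otherwise the
   32-bit ldfsr is performed */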
4486 #ifdef TARGET_SPARC64
4487 gen_address_mask(dc, cpu_addr);
4488 if (rd == 1) {
4489 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4490 gen_helper_ldxfsr(cpu_tmp64);
4491 } else {
4492 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4493 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4494 gen_helper_ldfsr(cpu_tmp32);
4495 }
4496 #else
4497 {
4498 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4499 gen_helper_ldfsr(cpu_tmp32);
4500 }
4501 #endif
4502 break;
4503 case 0x22: /* ldqf, load quad fpreg */
4504 {
4505 TCGv_i32 r_const;
4507 CHECK_FPU_FEATURE(dc, FLOAT128);
4508 r_const = tcg_const_i32(dc->mem_idx);
4509 gen_address_mask(dc, cpu_addr);
4510 gen_helper_ldqf(cpu_addr, r_const);
4511 tcg_temp_free_i32(r_const);
4512 gen_op_store_QT0_fpr(QFPREG(rd));
4513 }
4514 break;
4515 case 0x23: /* lddf, load double fpreg */
4516 {
4517 TCGv_i32 r_const;
4519 r_const = tcg_const_i32(dc->mem_idx);
4520 gen_address_mask(dc, cpu_addr);
4521 gen_helper_lddf(cpu_addr, r_const);
4522 tcg_temp_free_i32(r_const);
4523 gen_op_store_DT0_fpr(DFPREG(rd));
4524 }
4525 break;
4526 default:
4527 goto illegal_insn;
4528 }
4529 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4530 xop == 0xe || xop == 0x1e) {
4531 gen_movl_reg_TN(rd, cpu_val);
4532 switch (xop) {
4533 case 0x4: /* st, store word */
4534 gen_address_mask(dc, cpu_addr);
4535 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4536 break;
4537 case 0x5: /* stb, store byte */
4538 gen_address_mask(dc, cpu_addr);
4539 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4540 break;
4541 case 0x6: /* sth, store halfword */
4542 gen_address_mask(dc, cpu_addr);
4543 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4544 break;
4545 case 0x7: /* std, store double word */
4546 if (rd & 1)
4547 goto illegal_insn;
4548 else {
4549 TCGv_i32 r_const;
4551 save_state(dc, cpu_cond);
4552 gen_address_mask(dc, cpu_addr);
4553 r_const = tcg_const_i32(7);
4554 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4555 tcg_temp_free_i32(r_const);
4556 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4557 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4558 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4559 }
4560 break;
4561 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4562 case 0x14: /* sta, V9 stwa, store word alternate */
4563 #ifndef TARGET_SPARC64
4564 if (IS_IMM)
4565 goto illegal_insn;
4566 if (!supervisor(dc))
4567 goto priv_insn;
4568 #endif
4569 save_state(dc, cpu_cond);
4570 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4571 dc->npc = DYNAMIC_PC;
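/* npc is forced dynamic after the alternate-space store, presumably
   because an ASI store can change MMU or other CPU state behind the
   translator's back */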
4572 break;
4573 case 0x15: /* stba, store byte alternate */
4574 #ifndef TARGET_SPARC64
4575 if (IS_IMM)
4576 goto illegal_insn;
4577 if (!supervisor(dc))
4578 goto priv_insn;
4579 #endif
4580 save_state(dc, cpu_cond);
4581 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4582 dc->npc = DYNAMIC_PC;
4583 break;
4584 case 0x16: /* stha, store halfword alternate */
4585 #ifndef TARGET_SPARC64
4586 if (IS_IMM)
4587 goto illegal_insn;
4588 if (!supervisor(dc))
4589 goto priv_insn;
4590 #endif
4591 save_state(dc, cpu_cond);
4592 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4593 dc->npc = DYNAMIC_PC;
4594 break;
4595 case 0x17: /* stda, store double word alternate */
4596 #ifndef TARGET_SPARC64
4597 if (IS_IMM)
4598 goto illegal_insn;
4599 if (!supervisor(dc))
4600 goto priv_insn;
4601 #endif
4602 if (rd & 1)
4603 goto illegal_insn;
4604 else {
4605 save_state(dc, cpu_cond);
4606 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4607 }
4608 break;
4609 #endif
4610 #ifdef TARGET_SPARC64
4611 case 0x0e: /* V9 stx */
4612 gen_address_mask(dc, cpu_addr);
4613 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4614 break;
4615 case 0x1e: /* V9 stxa */
4616 save_state(dc, cpu_cond);
4617 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4618 dc->npc = DYNAMIC_PC;
4619 break;
4620 #endif
4621 default:
4622 goto illegal_insn;
4623 }
4624 } else if (xop > 0x23 && xop < 0x28) {
4625 if (gen_trap_ifnofpu(dc, cpu_cond))
4626 goto jmp_insn;
4627 save_state(dc, cpu_cond);
4628 switch (xop) {
4629 case 0x24: /* stf, store fpreg */
4630 gen_address_mask(dc, cpu_addr);
4631 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4632 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4633 break;
4634 case 0x25: /* stfsr, V9 stxfsr */
4635 #ifdef TARGET_SPARC64
4636 gen_address_mask(dc, cpu_addr);
4637 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4638 if (rd == 1)
4639 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4640 else
4641 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4642 #else
4643 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4644 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4645 #endif
4646 break;
4647 case 0x26:
4648 #ifdef TARGET_SPARC64
4649 /* V9 stqf, store quad fpreg */
4650 {
4651 TCGv_i32 r_const;
4653 CHECK_FPU_FEATURE(dc, FLOAT128);
4654 gen_op_load_fpr_QT0(QFPREG(rd));
4655 r_const = tcg_const_i32(dc->mem_idx);
4656 gen_address_mask(dc, cpu_addr);
4657 gen_helper_stqf(cpu_addr, r_const);
4658 tcg_temp_free_i32(r_const);
4659 }
4660 break;
4661 #else /* !TARGET_SPARC64 */
4662 /* stdfq, store floating point queue */
4663 #if defined(CONFIG_USER_ONLY)
4664 goto illegal_insn;
4665 #else
4666 if (!supervisor(dc))
4667 goto priv_insn;
4668 if (gen_trap_ifnofpu(dc, cpu_cond))
4669 goto jmp_insn;
4670 goto nfq_insn;
4671 #endif
4672 #endif
4673 case 0x27: /* stdf, store double fpreg */
4674 {
4675 TCGv_i32 r_const;
4677 gen_op_load_fpr_DT0(DFPREG(rd));
4678 r_const = tcg_const_i32(dc->mem_idx);
4679 gen_address_mask(dc, cpu_addr);
4680 gen_helper_stdf(cpu_addr, r_const);
4681 tcg_temp_free_i32(r_const);
4682 }
4683 break;
4684 default:
4685 goto illegal_insn;
4686 }
4687 } else if (xop > 0x33 && xop < 0x3f) {
4688 save_state(dc, cpu_cond);
4689 switch (xop) {
4690 #ifdef TARGET_SPARC64
4691 case 0x34: /* V9 stfa */
4692 gen_stf_asi(cpu_addr, insn, 4, rd);
4693 break;
4694 case 0x36: /* V9 stqfa */
4695 {
4696 TCGv_i32 r_const;
4698 CHECK_FPU_FEATURE(dc, FLOAT128);
4699 r_const = tcg_const_i32(7);
4700 gen_helper_check_align(cpu_addr, r_const);
4701 tcg_temp_free_i32(r_const);
4702 gen_op_load_fpr_QT0(QFPREG(rd));
4703 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4704 }
4705 break;
4706 case 0x37: /* V9 stdfa */
4707 gen_op_load_fpr_DT0(DFPREG(rd));
4708 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4709 break;
4710 case 0x3c: /* V9 casa */
4711 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4712 gen_movl_TN_reg(rd, cpu_val);
4713 break;
4714 case 0x3e: /* V9 casxa */
4715 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4716 gen_movl_TN_reg(rd, cpu_val);
4717 break;
4718 #else
4719 case 0x34: /* stc */
4720 case 0x35: /* stcsr */
4721 case 0x36: /* stdcq */
4722 case 0x37: /* stdc */
4723 goto ncp_insn;
4724 #endif
4725 default:
4726 goto illegal_insn;
4727 }
4728 } else
4729 goto illegal_insn;
4730 }
4731 break;
4732 }
4733 /* default case for non-jump instructions */
4734 if (dc->npc == DYNAMIC_PC) {
4735 dc->pc = DYNAMIC_PC;
4736 gen_op_next_insn();
4737 } else if (dc->npc == JUMP_PC) {
4738 /* we can do a static jump */
4739 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4740 dc->is_br = 1;
4741 } else {
4742 dc->pc = dc->npc;
4743 dc->npc = dc->npc + 4;
4744 }
4745 jmp_insn:
4746 goto egress;
4747 illegal_insn:
4748 {
4749 TCGv_i32 r_const;
4751 save_state(dc, cpu_cond);
4752 r_const = tcg_const_i32(TT_ILL_INSN);
4753 gen_helper_raise_exception(r_const);
4754 tcg_temp_free_i32(r_const);
4755 dc->is_br = 1;
4756 }
4757 goto egress;
4758 unimp_flush:
4759 {
4760 TCGv_i32 r_const;
4762 save_state(dc, cpu_cond);
4763 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4764 gen_helper_raise_exception(r_const);
4765 tcg_temp_free_i32(r_const);
4766 dc->is_br = 1;
4767 }
4768 goto egress;
4769 #if !defined(CONFIG_USER_ONLY)
4770 priv_insn:
4771 {
4772 TCGv_i32 r_const;
4774 save_state(dc, cpu_cond);
4775 r_const = tcg_const_i32(TT_PRIV_INSN);
4776 gen_helper_raise_exception(r_const);
4777 tcg_temp_free_i32(r_const);
4778 dc->is_br = 1;
4779 }
4780 goto egress;
4781 #endif
4782 nfpu_insn:
4783 save_state(dc, cpu_cond);
4784 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4785 dc->is_br = 1;
4786 goto egress;
4787 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4788 nfq_insn:
4789 save_state(dc, cpu_cond);
4790 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4791 dc->is_br = 1;
4792 goto egress;
4793 #endif
4794 #ifndef TARGET_SPARC64
4795 ncp_insn:
4796 {
4797 TCGv_i32 r_const;
4799 save_state(dc, cpu_cond);
4800 r_const = tcg_const_i32(TT_NCP_INSN);
4801 gen_helper_raise_exception(r_const);
4802 tcg_temp_free_i32(r_const);
4803 dc->is_br = 1;
4804 }
4805 goto egress;
4806 #endif
4807 egress:
4808 tcg_temp_free(cpu_tmp1);
4809 tcg_temp_free(cpu_tmp2);
4810 }
4812 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4813 int spc, CPUSPARCState *env)
4814 {
4815 target_ulong pc_start, last_pc;
4816 uint16_t *gen_opc_end;
4817 DisasContext dc1, *dc = &dc1;
4818 CPUBreakpoint *bp;
4819 int j, lj = -1;
4820 int num_insns;
4821 int max_insns;
4823 memset(dc, 0, sizeof(DisasContext));
4824 dc->tb = tb;
4825 pc_start = tb->pc;
4826 dc->pc = pc_start;
4827 last_pc = dc->pc;
4828 dc->npc = (target_ulong) tb->cs_base;
4829 dc->cc_op = CC_OP_DYNAMIC;
4830 dc->mem_idx = cpu_mmu_index(env);
4831 dc->def = env->def;
4832 if ((dc->def->features & CPU_FEATURE_FLOAT))
4833 dc->fpu_enabled = cpu_fpu_enabled(env);
4834 else
4835 dc->fpu_enabled = 0;
4836 #ifdef TARGET_SPARC64
4837 dc->address_mask_32bit = env->pstate & PS_AM;
4838 #endif
4839 dc->singlestep = (env->singlestep_enabled || singlestep);
4840 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4842 cpu_tmp0 = tcg_temp_new();
4843 cpu_tmp32 = tcg_temp_new_i32();
4844 cpu_tmp64 = tcg_temp_new_i64();
4846 cpu_dst = tcg_temp_local_new();
4848 // loads and stores
4849 cpu_val = tcg_temp_local_new();
4850 cpu_addr = tcg_temp_local_new();
4852 num_insns = 0;
4853 max_insns = tb->cflags & CF_COUNT_MASK;
4854 if (max_insns == 0)
4855 max_insns = CF_COUNT_MASK;
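/* a zero insn count in cflags means no explicit limit, so fall back
   to the maximum encodable value */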
4856 gen_icount_start();
4857 do {
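/* translate one guest instruction per iteration; the loop stops on
   branches, page crossings, single-stepping, a full opcode buffer or
   an exhausted insn budget */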
4858 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4859 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4860 if (bp->pc == dc->pc) {
4861 if (dc->pc != pc_start)
4862 save_state(dc, cpu_cond);
4863 gen_helper_debug();
4864 tcg_gen_exit_tb(0);
4865 dc->is_br = 1;
4866 goto exit_gen_loop;
4867 }
4868 }
4869 }
4870 if (spc) {
4871 qemu_log("Search PC...\n");
4872 j = gen_opc_ptr - gen_opc_buf;
4873 if (lj < j) {
4874 lj++;
4875 while (lj < j)
4876 gen_opc_instr_start[lj++] = 0;
4877 gen_opc_pc[lj] = dc->pc;
4878 gen_opc_npc[lj] = dc->npc;
4879 gen_opc_instr_start[lj] = 1;
4880 gen_opc_icount[lj] = num_insns;
4881 }
4882 }
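/* the gen_opc_* tables map each generated opcode index back to the
   guest pc/npc so the cpu state can be reconstructed after a fault
   (see gen_pc_load below) */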
4883 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4884 gen_io_start();
4885 last_pc = dc->pc;
4886 disas_sparc_insn(dc);
4887 num_insns++;
4889 if (dc->is_br)
4890 break;
4891 /* if the next PC is not sequential, we stop generation now */
4892 if (dc->pc != (last_pc + 4))
4893 break;
4894 /* if we reach a page boundary, we stop generation so that the
4895 PC of a TT_TFAULT exception is always in the right page */
4896 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4897 break;
4898 /* in single-step mode, we generate only one instruction and
4899 then raise an exception */
4900 if (dc->singlestep) {
4901 break;
4902 }
4903 } while ((gen_opc_ptr < gen_opc_end) &&
4904 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4905 num_insns < max_insns);
4907 exit_gen_loop:
4908 tcg_temp_free(cpu_addr);
4909 tcg_temp_free(cpu_val);
4910 tcg_temp_free(cpu_dst);
4911 tcg_temp_free_i64(cpu_tmp64);
4912 tcg_temp_free_i32(cpu_tmp32);
4913 tcg_temp_free(cpu_tmp0);
4914 if (tb->cflags & CF_LAST_IO)
4915 gen_io_end();
4916 if (!dc->is_br) {
4917 if (dc->pc != DYNAMIC_PC &&
4918 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4919 /* static PC and NPC: we can use direct chaining */
4920 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4921 } else {
4922 if (dc->pc != DYNAMIC_PC)
4923 tcg_gen_movi_tl(cpu_pc, dc->pc);
4924 save_npc(dc, cpu_cond);
4925 tcg_gen_exit_tb(0);
4926 }
4927 }
4928 gen_icount_end(tb, num_insns);
4929 *gen_opc_ptr = INDEX_op_end;
4930 if (spc) {
4931 j = gen_opc_ptr - gen_opc_buf;
4932 lj++;
4933 while (lj <= j)
4934 gen_opc_instr_start[lj++] = 0;
4935 #if 0
4936 log_page_dump();
4937 #endif
4938 gen_opc_jump_pc[0] = dc->jump_pc[0];
4939 gen_opc_jump_pc[1] = dc->jump_pc[1];
4940 } else {
4941 tb->size = last_pc + 4 - pc_start;
4942 tb->icount = num_insns;
4943 }
4944 #ifdef DEBUG_DISAS
4945 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4946 qemu_log("--------------\n");
4947 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4948 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4949 qemu_log("\n");
4950 }
4951 #endif
4952 }
4954 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4955 {
4956 gen_intermediate_code_internal(tb, 0, env);
4957 }
4959 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4960 {
4961 gen_intermediate_code_internal(tb, 1, env);
4962 }
4964 void gen_intermediate_code_init(CPUSPARCState *env)
4965 {
4966 unsigned int i;
4967 static int inited;
4968 static const char * const gregnames[8] = {
4969 NULL, // g0 not used
4970 "g1",
4971 "g2",
4972 "g3",
4973 "g4",
4974 "g5",
4975 "g6",
4976 "g7",
4978 static const char * const fregnames[64] = {
4979 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4980 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4981 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4982 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4983 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4984 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4985 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4986 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4989 /* init various static tables */
4990 if (!inited) {
4991 inited = 1;
4993 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4994 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4995 offsetof(CPUState, regwptr),
4996 "regwptr");
4997 #ifdef TARGET_SPARC64
4998 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4999 "xcc");
5000 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5001 "asi");
5002 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5003 "fprs");
5004 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5005 "gsr");
5006 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5007 offsetof(CPUState, tick_cmpr),
5008 "tick_cmpr");
5009 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5010 offsetof(CPUState, stick_cmpr),
5011 "stick_cmpr");
5012 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5013 offsetof(CPUState, hstick_cmpr),
5014 "hstick_cmpr");
5015 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5016 "hintp");
5017 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5018 "htba");
5019 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5020 "hver");
5021 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5022 offsetof(CPUState, ssr), "ssr");
5023 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5024 offsetof(CPUState, version), "ver");
5025 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5026 offsetof(CPUState, softint),
5027 "softint");
5028 #else
5029 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5030 "wim");
5031 #endif
5032 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5033 "cond");
5034 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5035 "cc_src");
5036 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5037 offsetof(CPUState, cc_src2),
5038 "cc_src2");
5039 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5040 "cc_dst");
5041 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5042 "cc_op");
5043 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5044 "psr");
5045 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5046 "fsr");
5047 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5048 "pc");
5049 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5050 "npc");
5051 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5052 #ifndef CONFIG_USER_ONLY
5053 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5054 "tbr");
5055 #endif
5056 for (i = 1; i < 8; i++)
5057 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5058 offsetof(CPUState, gregs[i]),
5059 gregnames[i]);
5060 for (i = 0; i < TARGET_FPREGS; i++)
5061 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5062 offsetof(CPUState, fpr[i]),
5063 fregnames[i]);
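/* each global above is a TCG value backed by a CPUState field, so
   generated code accesses the architectural registers directly */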
5065 /* register helpers */
5067 #define GEN_HELPER 2
5068 #include "helper.h"
5069 }
5070 }
5072 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5073 unsigned long searched_pc, int pc_pos, void *puc)
5074 {
5075 target_ulong npc;
5076 env->pc = gen_opc_pc[pc_pos];
5077 npc = gen_opc_npc[pc_pos];
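/* npc values 1 and 2 are the DYNAMIC_PC and JUMP_PC markers defined
   at the top of this file, not real addresses */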
5078 if (npc == 1) {
5079 /* dynamic NPC: already stored */
5080 } else if (npc == 2) {
5081 /* jump PC: use 'cond' and the jump targets of the translation */
5082 if (env->cond) {
5083 env->npc = gen_opc_jump_pc[0];
5084 } else {
5085 env->npc = gen_opc_jump_pc[1];
5086 }
5087 } else {
5088 env->npc = npc;
5089 }
5091 /* flush pending conditional evaluations before exposing cpu state */
5092 if (CC_OP != CC_OP_FLAGS) {
5093 helper_compute_psr();
5094 }
5095 }