Convert udiv/sdiv
[qemu-kvm/fedora.git] / target-sparc / translate.c
bloba69c708dda6da483781c03121df1c9f24158071d
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
81 sparc_def_t *def;
82 } DisasContext;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of 'x' to a full 32-bit signed value.
 * Used to widen immediate/displacement fields extracted from instruction
 * words (see GET_FIELDs/GET_FIELD_SPs).
 */
static int sign_extend(int x, int len)
{
    /* Left-shifting a signed int so that bits reach or cross the sign bit
       is undefined behaviour in C; perform the left shift on an unsigned
       type and only the (arithmetic) right shift on the signed type. */
    len = 32 - len;
    return (int32_t)((uint32_t)x << len) >> len;
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
/* Floating point register moves: the FP helpers operate on the fixed
   dt0/dt1 (double) and qt0/qt1 (quad) staging slots inside CPUSPARCState;
   these helpers copy 32-bit FP register halves to/from those slots. */

/* Load double-precision operand DT0 from FP register pair src/src+1. */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Load double-precision operand DT1 from FP register pair src/src+1. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Store double-precision result DT0 back into FP register pair dst/dst+1. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Load quad-precision operand QT0 from FP registers src..src+3. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* Load quad-precision operand QT1 from FP registers src..src+3. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* Store quad-precision result QT0 back into FP registers dst..dst+3. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
/* On sparc64, truncate 'addr' to 32 bits when the 32-bit address mask
   is in effect (AM_CHECK); a no-op on 32-bit targets. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

/* Copy window register 'reg' into 'tn'.  %g0 reads as constant zero,
   %g1-%g7 live in TCG globals, the rest are loaded through the current
   register-window pointer. */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

/* Copy 'tn' into window register 'reg'.  Writes to %g0 are discarded. */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
/* End the TB by jumping to (pc, npc).  When both targets lie on the same
   guest page as the current TB, emit a chainable direct jump (goto_tb);
   otherwise fall back to an unchained exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* return (tb | tb_num) so the caller can patch the chain */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
248 // XXX suboptimal
// XXX suboptimal
/* Extract PSR.N into bit 0 of 'reg'. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.Z into bit 0 of 'reg'. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.V into bit 0 of 'reg'. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.C into bit 0 of 'reg'. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Clear all 32-bit (icc) condition codes before recomputing them. */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
/* Clear all 64-bit (xcc) condition codes before recomputing them. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
/* Set icc N and Z from the low 32 bits of 'dst'. */
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    /* only the 32-bit result matters for icc */
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
/* Set xcc N and Z from the full 64-bit 'dst'. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif

/* old op:
   if (T0 < src1)
       env->psr |= PSR_CARRY;
*/
/* Set icc C when a 32-bit addition wrapped (result < operand). */
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
/* Set xcc C when a 64-bit addition wrapped. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif

/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
/* Set icc V on signed overflow of a 32-bit add: overflow occurs when the
   operands have equal signs and the result's sign differs. */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* move overflow bit into the PSR_OVF position */
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
/* Set xcc V on signed overflow of a 64-bit add (same formula on bit 63). */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
/* Raise a TT_TOVF trap if the 32-bit add dst = src1 + src2 overflowed
   (used by the tagged/trapping TADDccTV instruction). */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    /* same sign-of-operands vs sign-of-result overflow test as
       gen_cc_V_add_icc */
    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

/* Raise TT_TOVF if either operand has a nonzero tag (its two low bits
   set) -- the tagged-arithmetic precondition check. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
/* Add-family operations that record their operands and result in
   cpu_cc_src/cpu_cc_src2/cpu_cc_dst so the condition codes can be
   (re)computed lazily later. */

/* dst = src1 + imm, recording operands for cc computation. */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* dst = src1 + src2, recording operands for cc computation. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* ADDX with immediate: dst = src1 + imm + carry (PSR.C). */
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* ADDX: dst = src1 + src2 + carry (PSR.C). */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TADDcc: tagged add; cc computed like a normal add. */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TADDccTV: tagged add that traps on bad tags or signed overflow. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* Raise TT_TOVF if the 32-bit subtraction dst = src1 - src2 overflowed
   (operands of different sign and result sign differs from src1). */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

/* dst = src1 - imm, recording operands and updating dc->cc_op.
   Subtracting 0 leaves the result equal to src1, so the cheaper
   CC_OP_LOGIC flag computation suffices in that case. */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* dst = src1 - src2, recording operands for cc computation. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBX with immediate: dst = src1 - carry (PSR.C) - imm. */
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBX: dst = src1 - carry (PSR.C) - src2. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TSUBcc: tagged subtract; cc computed like a normal subtract. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TSUBccTV: tagged subtract that traps on bad tags or signed overflow. */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* MULScc: one step of the SPARC V8 iterative multiply.  Conditionally
   adds the multiplicand (depending on Y bit 0), shifts Y and the partial
   product right by one bit, and updates the icc flags eagerly. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* UMUL: unsigned 32x32 -> 64-bit multiply.  The high 32 bits go to the
   Y register; dst receives the full 64-bit product on sparc64 or the
   low 32 bits otherwise. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

/* SMUL: signed 32x32 -> 64-bit multiply; same Y/dst convention as UMUL. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
/* Raise a TT_DIV_ZERO trap if 'divisor' is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* SDIVX: 64-bit signed divide.  Traps on a zero divisor and special-cases
   INT64_MIN / -1 (whose true quotient does not fit; also avoids a host
   integer-divide fault) to return INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
/* Integer condition-code evaluators: each sets 'dst' to 0/1 according to
   the named branch condition, reading the flags from 'src' (psr or xcc).
   The comment above each gives the boolean formula. */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the fcc set selected by 'fcc_offset' into bit 0 of reg. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract FCC1 of the fcc set selected by 'fcc_offset' into bit 0 of reg. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/* Emit the end of a TB with two possible successors: go to pc1 if
   r_cond is nonzero, otherwise to pc2. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

/* Annulled conditional branch: if taken (r_cond != 0) execute the delay
   slot at pc2 then jump to pc1; if not taken, annul the delay slot and
   continue at pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

/* Materialize a pending two-way npc: cpu_npc = r_cond ? npc1 : npc2. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

/* Make cpu_npc hold the correct next-PC value, resolving a pending
   JUMP_PC or storing a known static npc. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

/* Flush the translator's pc/npc into cpu_pc/cpu_npc (e.g. before an
   operation that can raise an exception). */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}

/* Advance pc to npc (delay-slot semantics), resolving a pending JUMP_PC
   or dynamic npc into cpu_pc. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

/* Emit code for sequential execution: pc = npc; npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
/* Evaluate integer branch condition 'cond' (Bicc/BPcc encoding) against
   the icc (cc == 0) or xcc flags, leaving 0/1 in r_dst.  Flags are
   computed lazily: if cc_op is not CC_OP_FLAGS, materialize them first. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        /* flags not up to date: compute them from cc_src/cc_src2/cc_dst */
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate FP branch condition 'cond' (FBfcc/FBPfcc encoding) against
   FSR fcc field 'cc' (0-3), leaving 0/1 in r_dst.  'offset' is the shift
   added to FSR_FCC0_SHIFT/FSR_FCC1_SHIFT to select the field: fcc0 sits
   at bits 10-11, fcc1-3 at bits 32-33/34-35/36-37. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
/* Maps the register-condition encoding (BPr/MOVr, 3-bit rcond field) to
   the TCG condition for the OPPOSITE test; entries 0 and 4 are reserved
   encodings.  NOTE(review): the two -1 placeholder rows were lost in
   extraction and restored here -- confirm against upstream. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* r_dst = (r_src <cond> 0) ? 1 : 0.  Uses the inverted condition to
   branch around the "set to 1" when the real condition fails. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1237 /* XXX: potentially incorrect if dynamic npc */
1238 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1239 TCGv r_cond)
1241 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1242 target_ulong target = dc->pc + offset;
1244 if (cond == 0x0) {
1245 /* unconditional not taken */
1246 if (a) {
1247 dc->pc = dc->npc + 4;
1248 dc->npc = dc->pc + 4;
1249 } else {
1250 dc->pc = dc->npc;
1251 dc->npc = dc->pc + 4;
1253 } else if (cond == 0x8) {
1254 /* unconditional taken */
1255 if (a) {
1256 dc->pc = target;
1257 dc->npc = dc->pc + 4;
1258 } else {
1259 dc->pc = dc->npc;
1260 dc->npc = target;
1262 } else {
1263 flush_cond(dc, r_cond);
1264 gen_cond(r_cond, cc, cond, dc);
1265 if (a) {
1266 gen_branch_a(dc, target, dc->npc, r_cond);
1267 dc->is_br = 1;
1268 } else {
1269 dc->pc = dc->npc;
1270 dc->jump_pc[0] = target;
1271 dc->jump_pc[1] = dc->npc + 4;
1272 dc->npc = JUMP_PC;
1277 /* XXX: potentially incorrect if dynamic npc */
1278 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1279 TCGv r_cond)
1281 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1282 target_ulong target = dc->pc + offset;
1284 if (cond == 0x0) {
1285 /* unconditional not taken */
1286 if (a) {
1287 dc->pc = dc->npc + 4;
1288 dc->npc = dc->pc + 4;
1289 } else {
1290 dc->pc = dc->npc;
1291 dc->npc = dc->pc + 4;
1293 } else if (cond == 0x8) {
1294 /* unconditional taken */
1295 if (a) {
1296 dc->pc = target;
1297 dc->npc = dc->pc + 4;
1298 } else {
1299 dc->pc = dc->npc;
1300 dc->npc = target;
1302 } else {
1303 flush_cond(dc, r_cond);
1304 gen_fcond(r_cond, cc, cond);
1305 if (a) {
1306 gen_branch_a(dc, target, dc->npc, r_cond);
1307 dc->is_br = 1;
1308 } else {
1309 dc->pc = dc->npc;
1310 dc->jump_pc[0] = target;
1311 dc->jump_pc[1] = dc->npc + 4;
1312 dc->npc = JUMP_PC;
1317 #ifdef TARGET_SPARC64
1318 /* XXX: potentially incorrect if dynamic npc */
1319 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1320 TCGv r_cond, TCGv r_reg)
1322 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1323 target_ulong target = dc->pc + offset;
1325 flush_cond(dc, r_cond);
1326 gen_cond_reg(r_cond, cond, r_reg);
1327 if (a) {
1328 gen_branch_a(dc, target, dc->npc, r_cond);
1329 dc->is_br = 1;
1330 } else {
1331 dc->pc = dc->npc;
1332 dc->jump_pc[0] = target;
1333 dc->jump_pc[1] = dc->npc + 4;
1334 dc->npc = JUMP_PC;
1338 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1340 switch (fccno) {
1341 case 0:
1342 gen_helper_fcmps(r_rs1, r_rs2);
1343 break;
1344 case 1:
1345 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1346 break;
1347 case 2:
1348 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1349 break;
1350 case 3:
1351 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1352 break;
1356 static inline void gen_op_fcmpd(int fccno)
1358 switch (fccno) {
1359 case 0:
1360 gen_helper_fcmpd();
1361 break;
1362 case 1:
1363 gen_helper_fcmpd_fcc1();
1364 break;
1365 case 2:
1366 gen_helper_fcmpd_fcc2();
1367 break;
1368 case 3:
1369 gen_helper_fcmpd_fcc3();
1370 break;
1374 static inline void gen_op_fcmpq(int fccno)
1376 switch (fccno) {
1377 case 0:
1378 gen_helper_fcmpq();
1379 break;
1380 case 1:
1381 gen_helper_fcmpq_fcc1();
1382 break;
1383 case 2:
1384 gen_helper_fcmpq_fcc2();
1385 break;
1386 case 3:
1387 gen_helper_fcmpq_fcc3();
1388 break;
1392 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1394 switch (fccno) {
1395 case 0:
1396 gen_helper_fcmpes(r_rs1, r_rs2);
1397 break;
1398 case 1:
1399 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1400 break;
1401 case 2:
1402 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1403 break;
1404 case 3:
1405 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1406 break;
1410 static inline void gen_op_fcmped(int fccno)
1412 switch (fccno) {
1413 case 0:
1414 gen_helper_fcmped();
1415 break;
1416 case 1:
1417 gen_helper_fcmped_fcc1();
1418 break;
1419 case 2:
1420 gen_helper_fcmped_fcc2();
1421 break;
1422 case 3:
1423 gen_helper_fcmped_fcc3();
1424 break;
1428 static inline void gen_op_fcmpeq(int fccno)
1430 switch (fccno) {
1431 case 0:
1432 gen_helper_fcmpeq();
1433 break;
1434 case 1:
1435 gen_helper_fcmpeq_fcc1();
1436 break;
1437 case 2:
1438 gen_helper_fcmpeq_fcc2();
1439 break;
1440 case 3:
1441 gen_helper_fcmpeq_fcc3();
1442 break;
1446 #else
1448 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1450 gen_helper_fcmps(r_rs1, r_rs2);
1453 static inline void gen_op_fcmpd(int fccno)
1455 gen_helper_fcmpd();
1458 static inline void gen_op_fcmpq(int fccno)
1460 gen_helper_fcmpq();
1463 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1465 gen_helper_fcmpes(r_rs1, r_rs2);
1468 static inline void gen_op_fcmped(int fccno)
1470 gen_helper_fcmped();
1473 static inline void gen_op_fcmpeq(int fccno)
1475 gen_helper_fcmpeq();
1477 #endif
1479 static inline void gen_op_fpexception_im(int fsr_flags)
1481 TCGv_i32 r_const;
1483 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1484 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1485 r_const = tcg_const_i32(TT_FP_EXCP);
1486 gen_helper_raise_exception(r_const);
1487 tcg_temp_free_i32(r_const);
1490 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1492 #if !defined(CONFIG_USER_ONLY)
1493 if (!dc->fpu_enabled) {
1494 TCGv_i32 r_const;
1496 save_state(dc, r_cond);
1497 r_const = tcg_const_i32(TT_NFPU_INSN);
1498 gen_helper_raise_exception(r_const);
1499 tcg_temp_free_i32(r_const);
1500 dc->is_br = 1;
1501 return 1;
1503 #endif
1504 return 0;
1507 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1509 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat accrued-exception state before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1517 /* asi moves */
1518 #ifdef TARGET_SPARC64
1519 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1521 int asi;
1522 TCGv_i32 r_asi;
1524 if (IS_IMM) {
1525 r_asi = tcg_temp_new_i32();
1526 tcg_gen_mov_i32(r_asi, cpu_asi);
1527 } else {
1528 asi = GET_FIELD(insn, 19, 26);
1529 r_asi = tcg_const_i32(asi);
1531 return r_asi;
1534 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1535 int sign)
1537 TCGv_i32 r_asi, r_size, r_sign;
1539 r_asi = gen_get_asi(insn, addr);
1540 r_size = tcg_const_i32(size);
1541 r_sign = tcg_const_i32(sign);
1542 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1543 tcg_temp_free_i32(r_sign);
1544 tcg_temp_free_i32(r_size);
1545 tcg_temp_free_i32(r_asi);
1548 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1550 TCGv_i32 r_asi, r_size;
1552 r_asi = gen_get_asi(insn, addr);
1553 r_size = tcg_const_i32(size);
1554 gen_helper_st_asi(addr, src, r_asi, r_size);
1555 tcg_temp_free_i32(r_size);
1556 tcg_temp_free_i32(r_asi);
1559 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1561 TCGv_i32 r_asi, r_size, r_rd;
1563 r_asi = gen_get_asi(insn, addr);
1564 r_size = tcg_const_i32(size);
1565 r_rd = tcg_const_i32(rd);
1566 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1567 tcg_temp_free_i32(r_rd);
1568 tcg_temp_free_i32(r_size);
1569 tcg_temp_free_i32(r_asi);
1572 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1574 TCGv_i32 r_asi, r_size, r_rd;
1576 r_asi = gen_get_asi(insn, addr);
1577 r_size = tcg_const_i32(size);
1578 r_rd = tcg_const_i32(rd);
1579 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1580 tcg_temp_free_i32(r_rd);
1581 tcg_temp_free_i32(r_size);
1582 tcg_temp_free_i32(r_asi);
1585 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1587 TCGv_i32 r_asi, r_size, r_sign;
1589 r_asi = gen_get_asi(insn, addr);
1590 r_size = tcg_const_i32(4);
1591 r_sign = tcg_const_i32(0);
1592 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1593 tcg_temp_free_i32(r_sign);
1594 gen_helper_st_asi(addr, dst, r_asi, r_size);
1595 tcg_temp_free_i32(r_size);
1596 tcg_temp_free_i32(r_asi);
1597 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1600 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1602 TCGv_i32 r_asi, r_rd;
1604 r_asi = gen_get_asi(insn, addr);
1605 r_rd = tcg_const_i32(rd);
1606 gen_helper_ldda_asi(addr, r_asi, r_rd);
1607 tcg_temp_free_i32(r_rd);
1608 tcg_temp_free_i32(r_asi);
1611 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1613 TCGv_i32 r_asi, r_size;
1615 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1616 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1617 r_asi = gen_get_asi(insn, addr);
1618 r_size = tcg_const_i32(8);
1619 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1620 tcg_temp_free_i32(r_size);
1621 tcg_temp_free_i32(r_asi);
1624 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1625 int rd)
1627 TCGv r_val1;
1628 TCGv_i32 r_asi;
1630 r_val1 = tcg_temp_new();
1631 gen_movl_reg_TN(rd, r_val1);
1632 r_asi = gen_get_asi(insn, addr);
1633 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1634 tcg_temp_free_i32(r_asi);
1635 tcg_temp_free(r_val1);
1638 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1639 int rd)
1641 TCGv_i32 r_asi;
1643 gen_movl_reg_TN(rd, cpu_tmp64);
1644 r_asi = gen_get_asi(insn, addr);
1645 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1646 tcg_temp_free_i32(r_asi);
1649 #elif !defined(CONFIG_USER_ONLY)
1651 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1652 int sign)
1654 TCGv_i32 r_asi, r_size, r_sign;
1656 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1657 r_size = tcg_const_i32(size);
1658 r_sign = tcg_const_i32(sign);
1659 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1660 tcg_temp_free(r_sign);
1661 tcg_temp_free(r_size);
1662 tcg_temp_free(r_asi);
1663 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1666 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1668 TCGv_i32 r_asi, r_size;
1670 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1671 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1672 r_size = tcg_const_i32(size);
1673 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1674 tcg_temp_free(r_size);
1675 tcg_temp_free(r_asi);
1678 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1680 TCGv_i32 r_asi, r_size, r_sign;
1681 TCGv_i64 r_val;
1683 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1684 r_size = tcg_const_i32(4);
1685 r_sign = tcg_const_i32(0);
1686 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1687 tcg_temp_free(r_sign);
1688 r_val = tcg_temp_new_i64();
1689 tcg_gen_extu_tl_i64(r_val, dst);
1690 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1691 tcg_temp_free_i64(r_val);
1692 tcg_temp_free(r_size);
1693 tcg_temp_free(r_asi);
1694 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1697 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1699 TCGv_i32 r_asi, r_size, r_sign;
1701 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1702 r_size = tcg_const_i32(8);
1703 r_sign = tcg_const_i32(0);
1704 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1705 tcg_temp_free(r_sign);
1706 tcg_temp_free(r_size);
1707 tcg_temp_free(r_asi);
1708 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1709 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1710 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1711 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1712 gen_movl_TN_reg(rd, hi);
1715 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1717 TCGv_i32 r_asi, r_size;
1719 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1720 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1721 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1722 r_size = tcg_const_i32(8);
1723 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1724 tcg_temp_free(r_size);
1725 tcg_temp_free(r_asi);
1727 #endif
1729 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1730 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1732 TCGv_i64 r_val;
1733 TCGv_i32 r_asi, r_size;
1735 gen_ld_asi(dst, addr, insn, 1, 0);
1737 r_val = tcg_const_i64(0xffULL);
1738 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1739 r_size = tcg_const_i32(1);
1740 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1741 tcg_temp_free_i32(r_size);
1742 tcg_temp_free_i32(r_asi);
1743 tcg_temp_free_i64(r_val);
1745 #endif
1747 static inline TCGv get_src1(unsigned int insn, TCGv def)
1749 TCGv r_rs1 = def;
1750 unsigned int rs1;
1752 rs1 = GET_FIELD(insn, 13, 17);
1753 if (rs1 == 0)
1754 r_rs1 = tcg_const_tl(0); // XXX how to free?
1755 else if (rs1 < 8)
1756 r_rs1 = cpu_gregs[rs1];
1757 else
1758 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1759 return r_rs1;
1762 static inline TCGv get_src2(unsigned int insn, TCGv def)
1764 TCGv r_rs2 = def;
1766 if (IS_IMM) { /* immediate */
1767 target_long simm;
1769 simm = GET_FIELDs(insn, 19, 31);
1770 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1771 } else { /* register */
1772 unsigned int rs2;
1774 rs2 = GET_FIELD(insn, 27, 31);
1775 if (rs2 == 0)
1776 r_rs2 = tcg_const_tl(0); // XXX how to free?
1777 else if (rs2 < 8)
1778 r_rs2 = cpu_gregs[rs2];
1779 else
1780 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1782 return r_rs2;
/* Feature gates used inside disas_sparc_insn(): bail out to the local
   illegal_insn / nfpu_insn labels when the CPU model lacks the feature. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1792 /* before an instruction, dc->pc must be static */
1793 static void disas_sparc_insn(DisasContext * dc)
1795 unsigned int insn, opc, rs1, rs2, rd;
1796 target_long simm;
1798 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1799 tcg_gen_debug_insn_start(dc->pc);
1800 insn = ldl_code(dc->pc);
1801 opc = GET_FIELD(insn, 0, 1);
1803 rd = GET_FIELD(insn, 2, 6);
1805 cpu_src1 = tcg_temp_new(); // const
1806 cpu_src2 = tcg_temp_new(); // const
1808 switch (opc) {
1809 case 0: /* branches/sethi */
1811 unsigned int xop = GET_FIELD(insn, 7, 9);
1812 int32_t target;
1813 switch (xop) {
1814 #ifdef TARGET_SPARC64
1815 case 0x1: /* V9 BPcc */
1817 int cc;
1819 target = GET_FIELD_SP(insn, 0, 18);
1820 target = sign_extend(target, 18);
1821 target <<= 2;
1822 cc = GET_FIELD_SP(insn, 20, 21);
1823 if (cc == 0)
1824 do_branch(dc, target, insn, 0, cpu_cond);
1825 else if (cc == 2)
1826 do_branch(dc, target, insn, 1, cpu_cond);
1827 else
1828 goto illegal_insn;
1829 goto jmp_insn;
1831 case 0x3: /* V9 BPr */
1833 target = GET_FIELD_SP(insn, 0, 13) |
1834 (GET_FIELD_SP(insn, 20, 21) << 14);
1835 target = sign_extend(target, 16);
1836 target <<= 2;
1837 cpu_src1 = get_src1(insn, cpu_src1);
1838 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1839 goto jmp_insn;
1841 case 0x5: /* V9 FBPcc */
1843 int cc = GET_FIELD_SP(insn, 20, 21);
1844 if (gen_trap_ifnofpu(dc, cpu_cond))
1845 goto jmp_insn;
1846 target = GET_FIELD_SP(insn, 0, 18);
1847 target = sign_extend(target, 19);
1848 target <<= 2;
1849 do_fbranch(dc, target, insn, cc, cpu_cond);
1850 goto jmp_insn;
1852 #else
1853 case 0x7: /* CBN+x */
1855 goto ncp_insn;
1857 #endif
1858 case 0x2: /* BN+x */
1860 target = GET_FIELD(insn, 10, 31);
1861 target = sign_extend(target, 22);
1862 target <<= 2;
1863 do_branch(dc, target, insn, 0, cpu_cond);
1864 goto jmp_insn;
1866 case 0x6: /* FBN+x */
1868 if (gen_trap_ifnofpu(dc, cpu_cond))
1869 goto jmp_insn;
1870 target = GET_FIELD(insn, 10, 31);
1871 target = sign_extend(target, 22);
1872 target <<= 2;
1873 do_fbranch(dc, target, insn, 0, cpu_cond);
1874 goto jmp_insn;
1876 case 0x4: /* SETHI */
1877 if (rd) { // nop
1878 uint32_t value = GET_FIELD(insn, 10, 31);
1879 TCGv r_const;
1881 r_const = tcg_const_tl(value << 10);
1882 gen_movl_TN_reg(rd, r_const);
1883 tcg_temp_free(r_const);
1885 break;
1886 case 0x0: /* UNIMPL */
1887 default:
1888 goto illegal_insn;
1890 break;
1892 break;
1893 case 1: /*CALL*/
1895 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1896 TCGv r_const;
1898 r_const = tcg_const_tl(dc->pc);
1899 gen_movl_TN_reg(15, r_const);
1900 tcg_temp_free(r_const);
1901 target += dc->pc;
1902 gen_mov_pc_npc(dc, cpu_cond);
1903 dc->npc = target;
1905 goto jmp_insn;
1906 case 2: /* FPU & Logical Operations */
1908 unsigned int xop = GET_FIELD(insn, 7, 12);
1909 if (xop == 0x3a) { /* generate trap */
1910 int cond;
1912 cpu_src1 = get_src1(insn, cpu_src1);
1913 if (IS_IMM) {
1914 rs2 = GET_FIELD(insn, 25, 31);
1915 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1916 } else {
1917 rs2 = GET_FIELD(insn, 27, 31);
1918 if (rs2 != 0) {
1919 gen_movl_reg_TN(rs2, cpu_src2);
1920 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1921 } else
1922 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1924 cond = GET_FIELD(insn, 3, 6);
1925 if (cond == 0x8) {
1926 save_state(dc, cpu_cond);
1927 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1928 supervisor(dc))
1929 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1930 else
1931 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1932 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1933 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1934 gen_helper_raise_exception(cpu_tmp32);
1935 } else if (cond != 0) {
1936 TCGv r_cond = tcg_temp_new();
1937 int l1;
1938 #ifdef TARGET_SPARC64
1939 /* V9 icc/xcc */
1940 int cc = GET_FIELD_SP(insn, 11, 12);
1942 save_state(dc, cpu_cond);
1943 if (cc == 0)
1944 gen_cond(r_cond, 0, cond, dc);
1945 else if (cc == 2)
1946 gen_cond(r_cond, 1, cond, dc);
1947 else
1948 goto illegal_insn;
1949 #else
1950 save_state(dc, cpu_cond);
1951 gen_cond(r_cond, 0, cond, dc);
1952 #endif
1953 l1 = gen_new_label();
1954 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1956 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1957 supervisor(dc))
1958 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1959 else
1960 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1961 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1962 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1963 gen_helper_raise_exception(cpu_tmp32);
1965 gen_set_label(l1);
1966 tcg_temp_free(r_cond);
1968 gen_op_next_insn();
1969 tcg_gen_exit_tb(0);
1970 dc->is_br = 1;
1971 goto jmp_insn;
1972 } else if (xop == 0x28) {
1973 rs1 = GET_FIELD(insn, 13, 17);
1974 switch(rs1) {
1975 case 0: /* rdy */
1976 #ifndef TARGET_SPARC64
1977 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1978 manual, rdy on the microSPARC
1979 II */
1980 case 0x0f: /* stbar in the SPARCv8 manual,
1981 rdy on the microSPARC II */
1982 case 0x10 ... 0x1f: /* implementation-dependent in the
1983 SPARCv8 manual, rdy on the
1984 microSPARC II */
1985 #endif
1986 gen_movl_TN_reg(rd, cpu_y);
1987 break;
1988 #ifdef TARGET_SPARC64
1989 case 0x2: /* V9 rdccr */
1990 gen_helper_compute_psr();
1991 gen_helper_rdccr(cpu_dst);
1992 gen_movl_TN_reg(rd, cpu_dst);
1993 break;
1994 case 0x3: /* V9 rdasi */
1995 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1996 gen_movl_TN_reg(rd, cpu_dst);
1997 break;
1998 case 0x4: /* V9 rdtick */
2000 TCGv_ptr r_tickptr;
2002 r_tickptr = tcg_temp_new_ptr();
2003 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2004 offsetof(CPUState, tick));
2005 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2006 tcg_temp_free_ptr(r_tickptr);
2007 gen_movl_TN_reg(rd, cpu_dst);
2009 break;
2010 case 0x5: /* V9 rdpc */
2012 TCGv r_const;
2014 r_const = tcg_const_tl(dc->pc);
2015 gen_movl_TN_reg(rd, r_const);
2016 tcg_temp_free(r_const);
2018 break;
2019 case 0x6: /* V9 rdfprs */
2020 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2021 gen_movl_TN_reg(rd, cpu_dst);
2022 break;
2023 case 0xf: /* V9 membar */
2024 break; /* no effect */
2025 case 0x13: /* Graphics Status */
2026 if (gen_trap_ifnofpu(dc, cpu_cond))
2027 goto jmp_insn;
2028 gen_movl_TN_reg(rd, cpu_gsr);
2029 break;
2030 case 0x16: /* Softint */
2031 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2032 gen_movl_TN_reg(rd, cpu_dst);
2033 break;
2034 case 0x17: /* Tick compare */
2035 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2036 break;
2037 case 0x18: /* System tick */
2039 TCGv_ptr r_tickptr;
2041 r_tickptr = tcg_temp_new_ptr();
2042 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2043 offsetof(CPUState, stick));
2044 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2045 tcg_temp_free_ptr(r_tickptr);
2046 gen_movl_TN_reg(rd, cpu_dst);
2048 break;
2049 case 0x19: /* System tick compare */
2050 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2051 break;
2052 case 0x10: /* Performance Control */
2053 case 0x11: /* Performance Instrumentation Counter */
2054 case 0x12: /* Dispatch Control */
2055 case 0x14: /* Softint set, WO */
2056 case 0x15: /* Softint clear, WO */
2057 #endif
2058 default:
2059 goto illegal_insn;
2061 #if !defined(CONFIG_USER_ONLY)
2062 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2063 #ifndef TARGET_SPARC64
2064 if (!supervisor(dc))
2065 goto priv_insn;
2066 gen_helper_compute_psr();
2067 dc->cc_op = CC_OP_FLAGS;
2068 gen_helper_rdpsr(cpu_dst);
2069 #else
2070 CHECK_IU_FEATURE(dc, HYPV);
2071 if (!hypervisor(dc))
2072 goto priv_insn;
2073 rs1 = GET_FIELD(insn, 13, 17);
2074 switch (rs1) {
2075 case 0: // hpstate
2076 // gen_op_rdhpstate();
2077 break;
2078 case 1: // htstate
2079 // gen_op_rdhtstate();
2080 break;
2081 case 3: // hintp
2082 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2083 break;
2084 case 5: // htba
2085 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2086 break;
2087 case 6: // hver
2088 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2089 break;
2090 case 31: // hstick_cmpr
2091 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2092 break;
2093 default:
2094 goto illegal_insn;
2096 #endif
2097 gen_movl_TN_reg(rd, cpu_dst);
2098 break;
2099 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2100 if (!supervisor(dc))
2101 goto priv_insn;
2102 #ifdef TARGET_SPARC64
2103 rs1 = GET_FIELD(insn, 13, 17);
2104 switch (rs1) {
2105 case 0: // tpc
2107 TCGv_ptr r_tsptr;
2109 r_tsptr = tcg_temp_new_ptr();
2110 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2111 offsetof(CPUState, tsptr));
2112 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2113 offsetof(trap_state, tpc));
2114 tcg_temp_free_ptr(r_tsptr);
2116 break;
2117 case 1: // tnpc
2119 TCGv_ptr r_tsptr;
2121 r_tsptr = tcg_temp_new_ptr();
2122 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2123 offsetof(CPUState, tsptr));
2124 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2125 offsetof(trap_state, tnpc));
2126 tcg_temp_free_ptr(r_tsptr);
2128 break;
2129 case 2: // tstate
2131 TCGv_ptr r_tsptr;
2133 r_tsptr = tcg_temp_new_ptr();
2134 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2135 offsetof(CPUState, tsptr));
2136 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2137 offsetof(trap_state, tstate));
2138 tcg_temp_free_ptr(r_tsptr);
2140 break;
2141 case 3: // tt
2143 TCGv_ptr r_tsptr;
2145 r_tsptr = tcg_temp_new_ptr();
2146 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2147 offsetof(CPUState, tsptr));
2148 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2149 offsetof(trap_state, tt));
2150 tcg_temp_free_ptr(r_tsptr);
2151 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2153 break;
2154 case 4: // tick
2156 TCGv_ptr r_tickptr;
2158 r_tickptr = tcg_temp_new_ptr();
2159 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2160 offsetof(CPUState, tick));
2161 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2162 gen_movl_TN_reg(rd, cpu_tmp0);
2163 tcg_temp_free_ptr(r_tickptr);
2165 break;
2166 case 5: // tba
2167 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2168 break;
2169 case 6: // pstate
2170 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2171 offsetof(CPUSPARCState, pstate));
2172 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2173 break;
2174 case 7: // tl
2175 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2176 offsetof(CPUSPARCState, tl));
2177 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2178 break;
2179 case 8: // pil
2180 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2181 offsetof(CPUSPARCState, psrpil));
2182 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2183 break;
2184 case 9: // cwp
2185 gen_helper_rdcwp(cpu_tmp0);
2186 break;
2187 case 10: // cansave
2188 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2189 offsetof(CPUSPARCState, cansave));
2190 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2191 break;
2192 case 11: // canrestore
2193 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2194 offsetof(CPUSPARCState, canrestore));
2195 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2196 break;
2197 case 12: // cleanwin
2198 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2199 offsetof(CPUSPARCState, cleanwin));
2200 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2201 break;
2202 case 13: // otherwin
2203 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2204 offsetof(CPUSPARCState, otherwin));
2205 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2206 break;
2207 case 14: // wstate
2208 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2209 offsetof(CPUSPARCState, wstate));
2210 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2211 break;
2212 case 16: // UA2005 gl
2213 CHECK_IU_FEATURE(dc, GL);
2214 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2215 offsetof(CPUSPARCState, gl));
2216 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2217 break;
2218 case 26: // UA2005 strand status
2219 CHECK_IU_FEATURE(dc, HYPV);
2220 if (!hypervisor(dc))
2221 goto priv_insn;
2222 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2223 break;
2224 case 31: // ver
2225 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2226 break;
2227 case 15: // fq
2228 default:
2229 goto illegal_insn;
2231 #else
2232 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2233 #endif
2234 gen_movl_TN_reg(rd, cpu_tmp0);
2235 break;
2236 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2237 #ifdef TARGET_SPARC64
2238 save_state(dc, cpu_cond);
2239 gen_helper_flushw();
2240 #else
2241 if (!supervisor(dc))
2242 goto priv_insn;
2243 gen_movl_TN_reg(rd, cpu_tbr);
2244 #endif
2245 break;
2246 #endif
2247 } else if (xop == 0x34) { /* FPU Operations */
2248 if (gen_trap_ifnofpu(dc, cpu_cond))
2249 goto jmp_insn;
2250 gen_op_clear_ieee_excp_and_FTT();
2251 rs1 = GET_FIELD(insn, 13, 17);
2252 rs2 = GET_FIELD(insn, 27, 31);
2253 xop = GET_FIELD(insn, 18, 26);
2254 switch (xop) {
2255 case 0x1: /* fmovs */
2256 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2257 break;
2258 case 0x5: /* fnegs */
2259 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2260 break;
2261 case 0x9: /* fabss */
2262 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2263 break;
2264 case 0x29: /* fsqrts */
2265 CHECK_FPU_FEATURE(dc, FSQRT);
2266 gen_clear_float_exceptions();
2267 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2268 gen_helper_check_ieee_exceptions();
2269 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2270 break;
2271 case 0x2a: /* fsqrtd */
2272 CHECK_FPU_FEATURE(dc, FSQRT);
2273 gen_op_load_fpr_DT1(DFPREG(rs2));
2274 gen_clear_float_exceptions();
2275 gen_helper_fsqrtd();
2276 gen_helper_check_ieee_exceptions();
2277 gen_op_store_DT0_fpr(DFPREG(rd));
2278 break;
2279 case 0x2b: /* fsqrtq */
2280 CHECK_FPU_FEATURE(dc, FLOAT128);
2281 gen_op_load_fpr_QT1(QFPREG(rs2));
2282 gen_clear_float_exceptions();
2283 gen_helper_fsqrtq();
2284 gen_helper_check_ieee_exceptions();
2285 gen_op_store_QT0_fpr(QFPREG(rd));
2286 break;
2287 case 0x41: /* fadds */
2288 gen_clear_float_exceptions();
2289 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2290 gen_helper_check_ieee_exceptions();
2291 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2292 break;
2293 case 0x42: /* faddd */
2294 gen_op_load_fpr_DT0(DFPREG(rs1));
2295 gen_op_load_fpr_DT1(DFPREG(rs2));
2296 gen_clear_float_exceptions();
2297 gen_helper_faddd();
2298 gen_helper_check_ieee_exceptions();
2299 gen_op_store_DT0_fpr(DFPREG(rd));
2300 break;
2301 case 0x43: /* faddq */
2302 CHECK_FPU_FEATURE(dc, FLOAT128);
2303 gen_op_load_fpr_QT0(QFPREG(rs1));
2304 gen_op_load_fpr_QT1(QFPREG(rs2));
2305 gen_clear_float_exceptions();
2306 gen_helper_faddq();
2307 gen_helper_check_ieee_exceptions();
2308 gen_op_store_QT0_fpr(QFPREG(rd));
2309 break;
2310 case 0x45: /* fsubs */
2311 gen_clear_float_exceptions();
2312 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2313 gen_helper_check_ieee_exceptions();
2314 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2315 break;
2316 case 0x46: /* fsubd */
2317 gen_op_load_fpr_DT0(DFPREG(rs1));
2318 gen_op_load_fpr_DT1(DFPREG(rs2));
2319 gen_clear_float_exceptions();
2320 gen_helper_fsubd();
2321 gen_helper_check_ieee_exceptions();
2322 gen_op_store_DT0_fpr(DFPREG(rd));
2323 break;
2324 case 0x47: /* fsubq */
2325 CHECK_FPU_FEATURE(dc, FLOAT128);
2326 gen_op_load_fpr_QT0(QFPREG(rs1));
2327 gen_op_load_fpr_QT1(QFPREG(rs2));
2328 gen_clear_float_exceptions();
2329 gen_helper_fsubq();
2330 gen_helper_check_ieee_exceptions();
2331 gen_op_store_QT0_fpr(QFPREG(rd));
2332 break;
2333 case 0x49: /* fmuls */
2334 CHECK_FPU_FEATURE(dc, FMUL);
2335 gen_clear_float_exceptions();
2336 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2337 gen_helper_check_ieee_exceptions();
2338 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2339 break;
2340 case 0x4a: /* fmuld */
2341 CHECK_FPU_FEATURE(dc, FMUL);
2342 gen_op_load_fpr_DT0(DFPREG(rs1));
2343 gen_op_load_fpr_DT1(DFPREG(rs2));
2344 gen_clear_float_exceptions();
2345 gen_helper_fmuld();
2346 gen_helper_check_ieee_exceptions();
2347 gen_op_store_DT0_fpr(DFPREG(rd));
2348 break;
2349 case 0x4b: /* fmulq */
2350 CHECK_FPU_FEATURE(dc, FLOAT128);
2351 CHECK_FPU_FEATURE(dc, FMUL);
2352 gen_op_load_fpr_QT0(QFPREG(rs1));
2353 gen_op_load_fpr_QT1(QFPREG(rs2));
2354 gen_clear_float_exceptions();
2355 gen_helper_fmulq();
2356 gen_helper_check_ieee_exceptions();
2357 gen_op_store_QT0_fpr(QFPREG(rd));
2358 break;
2359 case 0x4d: /* fdivs */
2360 gen_clear_float_exceptions();
2361 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2362 gen_helper_check_ieee_exceptions();
2363 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2364 break;
2365 case 0x4e: /* fdivd */
2366 gen_op_load_fpr_DT0(DFPREG(rs1));
2367 gen_op_load_fpr_DT1(DFPREG(rs2));
2368 gen_clear_float_exceptions();
2369 gen_helper_fdivd();
2370 gen_helper_check_ieee_exceptions();
2371 gen_op_store_DT0_fpr(DFPREG(rd));
2372 break;
2373 case 0x4f: /* fdivq */
2374 CHECK_FPU_FEATURE(dc, FLOAT128);
2375 gen_op_load_fpr_QT0(QFPREG(rs1));
2376 gen_op_load_fpr_QT1(QFPREG(rs2));
2377 gen_clear_float_exceptions();
2378 gen_helper_fdivq();
2379 gen_helper_check_ieee_exceptions();
2380 gen_op_store_QT0_fpr(QFPREG(rd));
2381 break;
2382 case 0x69: /* fsmuld */
2383 CHECK_FPU_FEATURE(dc, FSMULD);
2384 gen_clear_float_exceptions();
2385 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2386 gen_helper_check_ieee_exceptions();
2387 gen_op_store_DT0_fpr(DFPREG(rd));
2388 break;
2389 case 0x6e: /* fdmulq */
2390 CHECK_FPU_FEATURE(dc, FLOAT128);
2391 gen_op_load_fpr_DT0(DFPREG(rs1));
2392 gen_op_load_fpr_DT1(DFPREG(rs2));
2393 gen_clear_float_exceptions();
2394 gen_helper_fdmulq();
2395 gen_helper_check_ieee_exceptions();
2396 gen_op_store_QT0_fpr(QFPREG(rd));
2397 break;
2398 case 0xc4: /* fitos */
2399 gen_clear_float_exceptions();
2400 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2401 gen_helper_check_ieee_exceptions();
2402 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2403 break;
2404 case 0xc6: /* fdtos */
2405 gen_op_load_fpr_DT1(DFPREG(rs2));
2406 gen_clear_float_exceptions();
2407 gen_helper_fdtos(cpu_tmp32);
2408 gen_helper_check_ieee_exceptions();
2409 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2410 break;
2411 case 0xc7: /* fqtos */
2412 CHECK_FPU_FEATURE(dc, FLOAT128);
2413 gen_op_load_fpr_QT1(QFPREG(rs2));
2414 gen_clear_float_exceptions();
2415 gen_helper_fqtos(cpu_tmp32);
2416 gen_helper_check_ieee_exceptions();
2417 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2418 break;
2419 case 0xc8: /* fitod */
2420 gen_helper_fitod(cpu_fpr[rs2]);
2421 gen_op_store_DT0_fpr(DFPREG(rd));
2422 break;
2423 case 0xc9: /* fstod */
2424 gen_helper_fstod(cpu_fpr[rs2]);
2425 gen_op_store_DT0_fpr(DFPREG(rd));
2426 break;
2427 case 0xcb: /* fqtod */
2428 CHECK_FPU_FEATURE(dc, FLOAT128);
2429 gen_op_load_fpr_QT1(QFPREG(rs2));
2430 gen_clear_float_exceptions();
2431 gen_helper_fqtod();
2432 gen_helper_check_ieee_exceptions();
2433 gen_op_store_DT0_fpr(DFPREG(rd));
2434 break;
2435 case 0xcc: /* fitoq */
2436 CHECK_FPU_FEATURE(dc, FLOAT128);
2437 gen_helper_fitoq(cpu_fpr[rs2]);
2438 gen_op_store_QT0_fpr(QFPREG(rd));
2439 break;
2440 case 0xcd: /* fstoq */
2441 CHECK_FPU_FEATURE(dc, FLOAT128);
2442 gen_helper_fstoq(cpu_fpr[rs2]);
2443 gen_op_store_QT0_fpr(QFPREG(rd));
2444 break;
2445 case 0xce: /* fdtoq */
2446 CHECK_FPU_FEATURE(dc, FLOAT128);
2447 gen_op_load_fpr_DT1(DFPREG(rs2));
2448 gen_helper_fdtoq();
2449 gen_op_store_QT0_fpr(QFPREG(rd));
2450 break;
2451 case 0xd1: /* fstoi */
2452 gen_clear_float_exceptions();
2453 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2454 gen_helper_check_ieee_exceptions();
2455 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2456 break;
2457 case 0xd2: /* fdtoi */
2458 gen_op_load_fpr_DT1(DFPREG(rs2));
2459 gen_clear_float_exceptions();
2460 gen_helper_fdtoi(cpu_tmp32);
2461 gen_helper_check_ieee_exceptions();
2462 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2463 break;
2464 case 0xd3: /* fqtoi */
2465 CHECK_FPU_FEATURE(dc, FLOAT128);
2466 gen_op_load_fpr_QT1(QFPREG(rs2));
2467 gen_clear_float_exceptions();
2468 gen_helper_fqtoi(cpu_tmp32);
2469 gen_helper_check_ieee_exceptions();
2470 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2471 break;
2472 #ifdef TARGET_SPARC64
2473 case 0x2: /* V9 fmovd */
2474 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2475 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2476 cpu_fpr[DFPREG(rs2) + 1]);
2477 break;
2478 case 0x3: /* V9 fmovq */
2479 CHECK_FPU_FEATURE(dc, FLOAT128);
2480 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2481 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2482 cpu_fpr[QFPREG(rs2) + 1]);
2483 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2484 cpu_fpr[QFPREG(rs2) + 2]);
2485 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2486 cpu_fpr[QFPREG(rs2) + 3]);
2487 break;
2488 case 0x6: /* V9 fnegd */
2489 gen_op_load_fpr_DT1(DFPREG(rs2));
2490 gen_helper_fnegd();
2491 gen_op_store_DT0_fpr(DFPREG(rd));
2492 break;
2493 case 0x7: /* V9 fnegq */
2494 CHECK_FPU_FEATURE(dc, FLOAT128);
2495 gen_op_load_fpr_QT1(QFPREG(rs2));
2496 gen_helper_fnegq();
2497 gen_op_store_QT0_fpr(QFPREG(rd));
2498 break;
2499 case 0xa: /* V9 fabsd */
2500 gen_op_load_fpr_DT1(DFPREG(rs2));
2501 gen_helper_fabsd();
2502 gen_op_store_DT0_fpr(DFPREG(rd));
2503 break;
2504 case 0xb: /* V9 fabsq */
2505 CHECK_FPU_FEATURE(dc, FLOAT128);
2506 gen_op_load_fpr_QT1(QFPREG(rs2));
2507 gen_helper_fabsq();
2508 gen_op_store_QT0_fpr(QFPREG(rd));
2509 break;
2510 case 0x81: /* V9 fstox */
2511 gen_clear_float_exceptions();
2512 gen_helper_fstox(cpu_fpr[rs2]);
2513 gen_helper_check_ieee_exceptions();
2514 gen_op_store_DT0_fpr(DFPREG(rd));
2515 break;
2516 case 0x82: /* V9 fdtox */
2517 gen_op_load_fpr_DT1(DFPREG(rs2));
2518 gen_clear_float_exceptions();
2519 gen_helper_fdtox();
2520 gen_helper_check_ieee_exceptions();
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2522 break;
2523 case 0x83: /* V9 fqtox */
2524 CHECK_FPU_FEATURE(dc, FLOAT128);
2525 gen_op_load_fpr_QT1(QFPREG(rs2));
2526 gen_clear_float_exceptions();
2527 gen_helper_fqtox();
2528 gen_helper_check_ieee_exceptions();
2529 gen_op_store_DT0_fpr(DFPREG(rd));
2530 break;
2531 case 0x84: /* V9 fxtos */
2532 gen_op_load_fpr_DT1(DFPREG(rs2));
2533 gen_clear_float_exceptions();
2534 gen_helper_fxtos(cpu_tmp32);
2535 gen_helper_check_ieee_exceptions();
2536 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2537 break;
2538 case 0x88: /* V9 fxtod */
2539 gen_op_load_fpr_DT1(DFPREG(rs2));
2540 gen_clear_float_exceptions();
2541 gen_helper_fxtod();
2542 gen_helper_check_ieee_exceptions();
2543 gen_op_store_DT0_fpr(DFPREG(rd));
2544 break;
2545 case 0x8c: /* V9 fxtoq */
2546 CHECK_FPU_FEATURE(dc, FLOAT128);
2547 gen_op_load_fpr_DT1(DFPREG(rs2));
2548 gen_clear_float_exceptions();
2549 gen_helper_fxtoq();
2550 gen_helper_check_ieee_exceptions();
2551 gen_op_store_QT0_fpr(QFPREG(rd));
2552 break;
2553 #endif
2554 default:
2555 goto illegal_insn;
2557 } else if (xop == 0x35) { /* FPU Operations */
2558 #ifdef TARGET_SPARC64
2559 int cond;
2560 #endif
2561 if (gen_trap_ifnofpu(dc, cpu_cond))
2562 goto jmp_insn;
2563 gen_op_clear_ieee_excp_and_FTT();
2564 rs1 = GET_FIELD(insn, 13, 17);
2565 rs2 = GET_FIELD(insn, 27, 31);
2566 xop = GET_FIELD(insn, 18, 26);
2567 #ifdef TARGET_SPARC64
2568 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2569 int l1;
2571 l1 = gen_new_label();
2572 cond = GET_FIELD_SP(insn, 14, 17);
2573 cpu_src1 = get_src1(insn, cpu_src1);
2574 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2575 0, l1);
2576 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2577 gen_set_label(l1);
2578 break;
2579 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2580 int l1;
2582 l1 = gen_new_label();
2583 cond = GET_FIELD_SP(insn, 14, 17);
2584 cpu_src1 = get_src1(insn, cpu_src1);
2585 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2586 0, l1);
2587 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2588 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2589 gen_set_label(l1);
2590 break;
2591 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2592 int l1;
2594 CHECK_FPU_FEATURE(dc, FLOAT128);
2595 l1 = gen_new_label();
2596 cond = GET_FIELD_SP(insn, 14, 17);
2597 cpu_src1 = get_src1(insn, cpu_src1);
2598 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2599 0, l1);
2600 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2601 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2602 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2603 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2604 gen_set_label(l1);
2605 break;
2607 #endif
2608 switch (xop) {
2609 #ifdef TARGET_SPARC64
2610 #define FMOVSCC(fcc) \
2612 TCGv r_cond; \
2613 int l1; \
2615 l1 = gen_new_label(); \
2616 r_cond = tcg_temp_new(); \
2617 cond = GET_FIELD_SP(insn, 14, 17); \
2618 gen_fcond(r_cond, fcc, cond); \
2619 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2620 0, l1); \
2621 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2622 gen_set_label(l1); \
2623 tcg_temp_free(r_cond); \
2625 #define FMOVDCC(fcc) \
2627 TCGv r_cond; \
2628 int l1; \
2630 l1 = gen_new_label(); \
2631 r_cond = tcg_temp_new(); \
2632 cond = GET_FIELD_SP(insn, 14, 17); \
2633 gen_fcond(r_cond, fcc, cond); \
2634 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2635 0, l1); \
2636 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2637 cpu_fpr[DFPREG(rs2)]); \
2638 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2639 cpu_fpr[DFPREG(rs2) + 1]); \
2640 gen_set_label(l1); \
2641 tcg_temp_free(r_cond); \
2643 #define FMOVQCC(fcc) \
2645 TCGv r_cond; \
2646 int l1; \
2648 l1 = gen_new_label(); \
2649 r_cond = tcg_temp_new(); \
2650 cond = GET_FIELD_SP(insn, 14, 17); \
2651 gen_fcond(r_cond, fcc, cond); \
2652 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2653 0, l1); \
2654 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2655 cpu_fpr[QFPREG(rs2)]); \
2656 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2657 cpu_fpr[QFPREG(rs2) + 1]); \
2658 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2659 cpu_fpr[QFPREG(rs2) + 2]); \
2660 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2661 cpu_fpr[QFPREG(rs2) + 3]); \
2662 gen_set_label(l1); \
2663 tcg_temp_free(r_cond); \
2665 case 0x001: /* V9 fmovscc %fcc0 */
2666 FMOVSCC(0);
2667 break;
2668 case 0x002: /* V9 fmovdcc %fcc0 */
2669 FMOVDCC(0);
2670 break;
2671 case 0x003: /* V9 fmovqcc %fcc0 */
2672 CHECK_FPU_FEATURE(dc, FLOAT128);
2673 FMOVQCC(0);
2674 break;
2675 case 0x041: /* V9 fmovscc %fcc1 */
2676 FMOVSCC(1);
2677 break;
2678 case 0x042: /* V9 fmovdcc %fcc1 */
2679 FMOVDCC(1);
2680 break;
2681 case 0x043: /* V9 fmovqcc %fcc1 */
2682 CHECK_FPU_FEATURE(dc, FLOAT128);
2683 FMOVQCC(1);
2684 break;
2685 case 0x081: /* V9 fmovscc %fcc2 */
2686 FMOVSCC(2);
2687 break;
2688 case 0x082: /* V9 fmovdcc %fcc2 */
2689 FMOVDCC(2);
2690 break;
2691 case 0x083: /* V9 fmovqcc %fcc2 */
2692 CHECK_FPU_FEATURE(dc, FLOAT128);
2693 FMOVQCC(2);
2694 break;
2695 case 0x0c1: /* V9 fmovscc %fcc3 */
2696 FMOVSCC(3);
2697 break;
2698 case 0x0c2: /* V9 fmovdcc %fcc3 */
2699 FMOVDCC(3);
2700 break;
2701 case 0x0c3: /* V9 fmovqcc %fcc3 */
2702 CHECK_FPU_FEATURE(dc, FLOAT128);
2703 FMOVQCC(3);
2704 break;
2705 #undef FMOVSCC
2706 #undef FMOVDCC
2707 #undef FMOVQCC
2708 #define FMOVSCC(icc) \
2710 TCGv r_cond; \
2711 int l1; \
2713 l1 = gen_new_label(); \
2714 r_cond = tcg_temp_new(); \
2715 cond = GET_FIELD_SP(insn, 14, 17); \
2716 gen_cond(r_cond, icc, cond, dc); \
2717 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2718 0, l1); \
2719 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2720 gen_set_label(l1); \
2721 tcg_temp_free(r_cond); \
2723 #define FMOVDCC(icc) \
2725 TCGv r_cond; \
2726 int l1; \
2728 l1 = gen_new_label(); \
2729 r_cond = tcg_temp_new(); \
2730 cond = GET_FIELD_SP(insn, 14, 17); \
2731 gen_cond(r_cond, icc, cond, dc); \
2732 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2733 0, l1); \
2734 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2735 cpu_fpr[DFPREG(rs2)]); \
2736 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2737 cpu_fpr[DFPREG(rs2) + 1]); \
2738 gen_set_label(l1); \
2739 tcg_temp_free(r_cond); \
2741 #define FMOVQCC(icc) \
2743 TCGv r_cond; \
2744 int l1; \
2746 l1 = gen_new_label(); \
2747 r_cond = tcg_temp_new(); \
2748 cond = GET_FIELD_SP(insn, 14, 17); \
2749 gen_cond(r_cond, icc, cond, dc); \
2750 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2751 0, l1); \
2752 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2753 cpu_fpr[QFPREG(rs2)]); \
2754 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2755 cpu_fpr[QFPREG(rs2) + 1]); \
2756 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2757 cpu_fpr[QFPREG(rs2) + 2]); \
2758 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2759 cpu_fpr[QFPREG(rs2) + 3]); \
2760 gen_set_label(l1); \
2761 tcg_temp_free(r_cond); \
2764 case 0x101: /* V9 fmovscc %icc */
2765 FMOVSCC(0);
2766 break;
2767 case 0x102: /* V9 fmovdcc %icc */
2768 FMOVDCC(0);
2769 case 0x103: /* V9 fmovqcc %icc */
2770 CHECK_FPU_FEATURE(dc, FLOAT128);
2771 FMOVQCC(0);
2772 break;
2773 case 0x181: /* V9 fmovscc %xcc */
2774 FMOVSCC(1);
2775 break;
2776 case 0x182: /* V9 fmovdcc %xcc */
2777 FMOVDCC(1);
2778 break;
2779 case 0x183: /* V9 fmovqcc %xcc */
2780 CHECK_FPU_FEATURE(dc, FLOAT128);
2781 FMOVQCC(1);
2782 break;
2783 #undef FMOVSCC
2784 #undef FMOVDCC
2785 #undef FMOVQCC
2786 #endif
2787 case 0x51: /* fcmps, V9 %fcc */
2788 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2789 break;
2790 case 0x52: /* fcmpd, V9 %fcc */
2791 gen_op_load_fpr_DT0(DFPREG(rs1));
2792 gen_op_load_fpr_DT1(DFPREG(rs2));
2793 gen_op_fcmpd(rd & 3);
2794 break;
2795 case 0x53: /* fcmpq, V9 %fcc */
2796 CHECK_FPU_FEATURE(dc, FLOAT128);
2797 gen_op_load_fpr_QT0(QFPREG(rs1));
2798 gen_op_load_fpr_QT1(QFPREG(rs2));
2799 gen_op_fcmpq(rd & 3);
2800 break;
2801 case 0x55: /* fcmpes, V9 %fcc */
2802 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2803 break;
2804 case 0x56: /* fcmped, V9 %fcc */
2805 gen_op_load_fpr_DT0(DFPREG(rs1));
2806 gen_op_load_fpr_DT1(DFPREG(rs2));
2807 gen_op_fcmped(rd & 3);
2808 break;
2809 case 0x57: /* fcmpeq, V9 %fcc */
2810 CHECK_FPU_FEATURE(dc, FLOAT128);
2811 gen_op_load_fpr_QT0(QFPREG(rs1));
2812 gen_op_load_fpr_QT1(QFPREG(rs2));
2813 gen_op_fcmpeq(rd & 3);
2814 break;
2815 default:
2816 goto illegal_insn;
2818 } else if (xop == 0x2) {
2819 // clr/mov shortcut
2821 rs1 = GET_FIELD(insn, 13, 17);
2822 if (rs1 == 0) {
2823 // or %g0, x, y -> mov T0, x; mov y, T0
2824 if (IS_IMM) { /* immediate */
2825 TCGv r_const;
2827 simm = GET_FIELDs(insn, 19, 31);
2828 r_const = tcg_const_tl(simm);
2829 gen_movl_TN_reg(rd, r_const);
2830 tcg_temp_free(r_const);
2831 } else { /* register */
2832 rs2 = GET_FIELD(insn, 27, 31);
2833 gen_movl_reg_TN(rs2, cpu_dst);
2834 gen_movl_TN_reg(rd, cpu_dst);
2836 } else {
2837 cpu_src1 = get_src1(insn, cpu_src1);
2838 if (IS_IMM) { /* immediate */
2839 simm = GET_FIELDs(insn, 19, 31);
2840 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2841 gen_movl_TN_reg(rd, cpu_dst);
2842 } else { /* register */
2843 // or x, %g0, y -> mov T1, x; mov y, T1
2844 rs2 = GET_FIELD(insn, 27, 31);
2845 if (rs2 != 0) {
2846 gen_movl_reg_TN(rs2, cpu_src2);
2847 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2848 gen_movl_TN_reg(rd, cpu_dst);
2849 } else
2850 gen_movl_TN_reg(rd, cpu_src1);
2853 #ifdef TARGET_SPARC64
2854 } else if (xop == 0x25) { /* sll, V9 sllx */
2855 cpu_src1 = get_src1(insn, cpu_src1);
2856 if (IS_IMM) { /* immediate */
2857 simm = GET_FIELDs(insn, 20, 31);
2858 if (insn & (1 << 12)) {
2859 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2860 } else {
2861 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2863 } else { /* register */
2864 rs2 = GET_FIELD(insn, 27, 31);
2865 gen_movl_reg_TN(rs2, cpu_src2);
2866 if (insn & (1 << 12)) {
2867 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2868 } else {
2869 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2871 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2873 gen_movl_TN_reg(rd, cpu_dst);
2874 } else if (xop == 0x26) { /* srl, V9 srlx */
2875 cpu_src1 = get_src1(insn, cpu_src1);
2876 if (IS_IMM) { /* immediate */
2877 simm = GET_FIELDs(insn, 20, 31);
2878 if (insn & (1 << 12)) {
2879 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2880 } else {
2881 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2882 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2884 } else { /* register */
2885 rs2 = GET_FIELD(insn, 27, 31);
2886 gen_movl_reg_TN(rs2, cpu_src2);
2887 if (insn & (1 << 12)) {
2888 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2889 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2890 } else {
2891 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2892 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2893 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2896 gen_movl_TN_reg(rd, cpu_dst);
2897 } else if (xop == 0x27) { /* sra, V9 srax */
2898 cpu_src1 = get_src1(insn, cpu_src1);
2899 if (IS_IMM) { /* immediate */
2900 simm = GET_FIELDs(insn, 20, 31);
2901 if (insn & (1 << 12)) {
2902 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2903 } else {
2904 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2905 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2906 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2908 } else { /* register */
2909 rs2 = GET_FIELD(insn, 27, 31);
2910 gen_movl_reg_TN(rs2, cpu_src2);
2911 if (insn & (1 << 12)) {
2912 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2913 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2914 } else {
2915 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2916 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2917 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2918 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2921 gen_movl_TN_reg(rd, cpu_dst);
2922 #endif
2923 } else if (xop < 0x36) {
2924 if (xop < 0x20) {
2925 cpu_src1 = get_src1(insn, cpu_src1);
2926 cpu_src2 = get_src2(insn, cpu_src2);
2927 switch (xop & ~0x10) {
2928 case 0x0: /* add */
2929 if (IS_IMM) {
2930 simm = GET_FIELDs(insn, 19, 31);
2931 if (xop & 0x10) {
2932 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2933 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2934 dc->cc_op = CC_OP_ADD;
2935 } else {
2936 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2938 } else {
2939 if (xop & 0x10) {
2940 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2941 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2942 dc->cc_op = CC_OP_ADD;
2943 } else {
2944 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2947 break;
2948 case 0x1: /* and */
2949 if (IS_IMM) {
2950 simm = GET_FIELDs(insn, 19, 31);
2951 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2952 } else {
2953 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2955 if (xop & 0x10) {
2956 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2957 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2958 dc->cc_op = CC_OP_LOGIC;
2960 break;
2961 case 0x2: /* or */
2962 if (IS_IMM) {
2963 simm = GET_FIELDs(insn, 19, 31);
2964 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2965 } else {
2966 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2968 if (xop & 0x10) {
2969 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2970 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2971 dc->cc_op = CC_OP_LOGIC;
2973 break;
2974 case 0x3: /* xor */
2975 if (IS_IMM) {
2976 simm = GET_FIELDs(insn, 19, 31);
2977 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2978 } else {
2979 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2981 if (xop & 0x10) {
2982 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2983 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2984 dc->cc_op = CC_OP_LOGIC;
2986 break;
2987 case 0x4: /* sub */
2988 if (IS_IMM) {
2989 simm = GET_FIELDs(insn, 19, 31);
2990 if (xop & 0x10) {
2991 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2992 } else {
2993 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2995 } else {
2996 if (xop & 0x10) {
2997 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2998 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2999 dc->cc_op = CC_OP_SUB;
3000 } else {
3001 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3004 break;
3005 case 0x5: /* andn */
3006 if (IS_IMM) {
3007 simm = GET_FIELDs(insn, 19, 31);
3008 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3009 } else {
3010 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3012 if (xop & 0x10) {
3013 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3014 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3015 dc->cc_op = CC_OP_LOGIC;
3017 break;
3018 case 0x6: /* orn */
3019 if (IS_IMM) {
3020 simm = GET_FIELDs(insn, 19, 31);
3021 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3022 } else {
3023 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3025 if (xop & 0x10) {
3026 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3027 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3028 dc->cc_op = CC_OP_LOGIC;
3030 break;
3031 case 0x7: /* xorn */
3032 if (IS_IMM) {
3033 simm = GET_FIELDs(insn, 19, 31);
3034 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3035 } else {
3036 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3037 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3039 if (xop & 0x10) {
3040 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3041 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3042 dc->cc_op = CC_OP_LOGIC;
3044 break;
3045 case 0x8: /* addx, V9 addc */
3046 if (IS_IMM) {
3047 simm = GET_FIELDs(insn, 19, 31);
3048 if (xop & 0x10) {
3049 gen_helper_compute_psr();
3050 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3051 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3052 dc->cc_op = CC_OP_ADDX;
3053 } else {
3054 gen_helper_compute_psr();
3055 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3056 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3057 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3059 } else {
3060 if (xop & 0x10) {
3061 gen_helper_compute_psr();
3062 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3063 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3064 dc->cc_op = CC_OP_ADDX;
3065 } else {
3066 gen_helper_compute_psr();
3067 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3068 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3069 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3072 break;
3073 #ifdef TARGET_SPARC64
3074 case 0x9: /* V9 mulx */
3075 if (IS_IMM) {
3076 simm = GET_FIELDs(insn, 19, 31);
3077 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3078 } else {
3079 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3081 break;
3082 #endif
3083 case 0xa: /* umul */
3084 CHECK_IU_FEATURE(dc, MUL);
3085 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3086 if (xop & 0x10) {
3087 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3088 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3089 dc->cc_op = CC_OP_LOGIC;
3091 break;
3092 case 0xb: /* smul */
3093 CHECK_IU_FEATURE(dc, MUL);
3094 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3095 if (xop & 0x10) {
3096 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3097 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3098 dc->cc_op = CC_OP_LOGIC;
3100 break;
3101 case 0xc: /* subx, V9 subc */
3102 if (IS_IMM) {
3103 simm = GET_FIELDs(insn, 19, 31);
3104 if (xop & 0x10) {
3105 gen_helper_compute_psr();
3106 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3107 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3108 dc->cc_op = CC_OP_SUBX;
3109 } else {
3110 gen_helper_compute_psr();
3111 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3112 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3113 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3115 } else {
3116 if (xop & 0x10) {
3117 gen_helper_compute_psr();
3118 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3119 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3120 dc->cc_op = CC_OP_SUBX;
3121 } else {
3122 gen_helper_compute_psr();
3123 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3124 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3125 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3128 break;
3129 #ifdef TARGET_SPARC64
3130 case 0xd: /* V9 udivx */
3131 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3132 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3133 gen_trap_ifdivzero_tl(cpu_cc_src2);
3134 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3135 break;
3136 #endif
3137 case 0xe: /* udiv */
3138 CHECK_IU_FEATURE(dc, DIV);
3139 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3140 if (xop & 0x10) {
3141 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3142 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3143 dc->cc_op = CC_OP_DIV;
3145 break;
3146 case 0xf: /* sdiv */
3147 CHECK_IU_FEATURE(dc, DIV);
3148 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3149 if (xop & 0x10) {
3150 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3151 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3152 dc->cc_op = CC_OP_DIV;
3154 break;
3155 default:
3156 goto illegal_insn;
3158 gen_movl_TN_reg(rd, cpu_dst);
3159 } else {
3160 cpu_src1 = get_src1(insn, cpu_src1);
3161 cpu_src2 = get_src2(insn, cpu_src2);
3162 switch (xop) {
3163 case 0x20: /* taddcc */
3164 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3165 gen_movl_TN_reg(rd, cpu_dst);
3166 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3167 dc->cc_op = CC_OP_TADD;
3168 break;
3169 case 0x21: /* tsubcc */
3170 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3171 gen_movl_TN_reg(rd, cpu_dst);
3172 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3173 dc->cc_op = CC_OP_TSUB;
3174 break;
3175 case 0x22: /* taddcctv */
3176 save_state(dc, cpu_cond);
3177 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3178 gen_movl_TN_reg(rd, cpu_dst);
3179 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3180 dc->cc_op = CC_OP_TADDTV;
3181 break;
3182 case 0x23: /* tsubcctv */
3183 save_state(dc, cpu_cond);
3184 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3185 gen_movl_TN_reg(rd, cpu_dst);
3186 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3187 dc->cc_op = CC_OP_TSUBTV;
3188 break;
3189 case 0x24: /* mulscc */
3190 gen_helper_compute_psr();
3191 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3192 gen_movl_TN_reg(rd, cpu_dst);
3193 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3194 dc->cc_op = CC_OP_FLAGS;
3195 break;
3196 #ifndef TARGET_SPARC64
3197 case 0x25: /* sll */
3198 if (IS_IMM) { /* immediate */
3199 simm = GET_FIELDs(insn, 20, 31);
3200 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3201 } else { /* register */
3202 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3203 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3205 gen_movl_TN_reg(rd, cpu_dst);
3206 break;
3207 case 0x26: /* srl */
3208 if (IS_IMM) { /* immediate */
3209 simm = GET_FIELDs(insn, 20, 31);
3210 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3211 } else { /* register */
3212 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3213 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3215 gen_movl_TN_reg(rd, cpu_dst);
3216 break;
3217 case 0x27: /* sra */
3218 if (IS_IMM) { /* immediate */
3219 simm = GET_FIELDs(insn, 20, 31);
3220 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3221 } else { /* register */
3222 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3223 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3225 gen_movl_TN_reg(rd, cpu_dst);
3226 break;
3227 #endif
3228 case 0x30:
3230 switch(rd) {
3231 case 0: /* wry */
3232 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3233 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3234 break;
3235 #ifndef TARGET_SPARC64
3236 case 0x01 ... 0x0f: /* undefined in the
3237 SPARCv8 manual, nop
3238 on the microSPARC
3239 II */
3240 case 0x10 ... 0x1f: /* implementation-dependent
3241 in the SPARCv8
3242 manual, nop on the
3243 microSPARC II */
3244 break;
3245 #else
3246 case 0x2: /* V9 wrccr */
3247 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3248 gen_helper_wrccr(cpu_dst);
3249 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3250 dc->cc_op = CC_OP_FLAGS;
3251 break;
3252 case 0x3: /* V9 wrasi */
3253 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3254 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3255 break;
3256 case 0x6: /* V9 wrfprs */
3257 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3258 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3259 save_state(dc, cpu_cond);
3260 gen_op_next_insn();
3261 tcg_gen_exit_tb(0);
3262 dc->is_br = 1;
3263 break;
3264 case 0xf: /* V9 sir, nop if user */
3265 #if !defined(CONFIG_USER_ONLY)
3266 if (supervisor(dc))
3267 ; // XXX
3268 #endif
3269 break;
3270 case 0x13: /* Graphics Status */
3271 if (gen_trap_ifnofpu(dc, cpu_cond))
3272 goto jmp_insn;
3273 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3274 break;
3275 case 0x14: /* Softint set */
3276 if (!supervisor(dc))
3277 goto illegal_insn;
3278 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3279 gen_helper_set_softint(cpu_tmp64);
3280 break;
3281 case 0x15: /* Softint clear */
3282 if (!supervisor(dc))
3283 goto illegal_insn;
3284 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3285 gen_helper_clear_softint(cpu_tmp64);
3286 break;
3287 case 0x16: /* Softint write */
3288 if (!supervisor(dc))
3289 goto illegal_insn;
3290 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3291 gen_helper_write_softint(cpu_tmp64);
3292 break;
3293 case 0x17: /* Tick compare */
3294 #if !defined(CONFIG_USER_ONLY)
3295 if (!supervisor(dc))
3296 goto illegal_insn;
3297 #endif
3299 TCGv_ptr r_tickptr;
3301 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3302 cpu_src2);
3303 r_tickptr = tcg_temp_new_ptr();
3304 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3305 offsetof(CPUState, tick));
3306 gen_helper_tick_set_limit(r_tickptr,
3307 cpu_tick_cmpr);
3308 tcg_temp_free_ptr(r_tickptr);
3310 break;
3311 case 0x18: /* System tick */
3312 #if !defined(CONFIG_USER_ONLY)
3313 if (!supervisor(dc))
3314 goto illegal_insn;
3315 #endif
3317 TCGv_ptr r_tickptr;
3319 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3320 cpu_src2);
3321 r_tickptr = tcg_temp_new_ptr();
3322 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3323 offsetof(CPUState, stick));
3324 gen_helper_tick_set_count(r_tickptr,
3325 cpu_dst);
3326 tcg_temp_free_ptr(r_tickptr);
3328 break;
3329 case 0x19: /* System tick compare */
3330 #if !defined(CONFIG_USER_ONLY)
3331 if (!supervisor(dc))
3332 goto illegal_insn;
3333 #endif
3335 TCGv_ptr r_tickptr;
3337 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3338 cpu_src2);
3339 r_tickptr = tcg_temp_new_ptr();
3340 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3341 offsetof(CPUState, stick));
3342 gen_helper_tick_set_limit(r_tickptr,
3343 cpu_stick_cmpr);
3344 tcg_temp_free_ptr(r_tickptr);
3346 break;
3348 case 0x10: /* Performance Control */
3349 case 0x11: /* Performance Instrumentation
3350 Counter */
3351 case 0x12: /* Dispatch Control */
3352 #endif
3353 default:
3354 goto illegal_insn;
3357 break;
3358 #if !defined(CONFIG_USER_ONLY)
3359 case 0x31: /* wrpsr, V9 saved, restored */
3361 if (!supervisor(dc))
3362 goto priv_insn;
3363 #ifdef TARGET_SPARC64
3364 switch (rd) {
3365 case 0:
3366 gen_helper_saved();
3367 break;
3368 case 1:
3369 gen_helper_restored();
3370 break;
3371 case 2: /* UA2005 allclean */
3372 case 3: /* UA2005 otherw */
3373 case 4: /* UA2005 normalw */
3374 case 5: /* UA2005 invalw */
3375 // XXX
3376 default:
3377 goto illegal_insn;
3379 #else
3380 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3381 gen_helper_wrpsr(cpu_dst);
3382 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3383 dc->cc_op = CC_OP_FLAGS;
3384 save_state(dc, cpu_cond);
3385 gen_op_next_insn();
3386 tcg_gen_exit_tb(0);
3387 dc->is_br = 1;
3388 #endif
3390 break;
3391 case 0x32: /* wrwim, V9 wrpr */
3393 if (!supervisor(dc))
3394 goto priv_insn;
3395 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3396 #ifdef TARGET_SPARC64
3397 switch (rd) {
3398 case 0: // tpc
3400 TCGv_ptr r_tsptr;
3402 r_tsptr = tcg_temp_new_ptr();
3403 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3404 offsetof(CPUState, tsptr));
3405 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3406 offsetof(trap_state, tpc));
3407 tcg_temp_free_ptr(r_tsptr);
3409 break;
3410 case 1: // tnpc
3412 TCGv_ptr r_tsptr;
3414 r_tsptr = tcg_temp_new_ptr();
3415 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3416 offsetof(CPUState, tsptr));
3417 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3418 offsetof(trap_state, tnpc));
3419 tcg_temp_free_ptr(r_tsptr);
3421 break;
3422 case 2: // tstate
3424 TCGv_ptr r_tsptr;
3426 r_tsptr = tcg_temp_new_ptr();
3427 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3428 offsetof(CPUState, tsptr));
3429 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3430 offsetof(trap_state,
3431 tstate));
3432 tcg_temp_free_ptr(r_tsptr);
3434 break;
3435 case 3: // tt
3437 TCGv_ptr r_tsptr;
3439 r_tsptr = tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3441 offsetof(CPUState, tsptr));
3442 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3443 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3444 offsetof(trap_state, tt));
3445 tcg_temp_free_ptr(r_tsptr);
3447 break;
3448 case 4: // tick
3450 TCGv_ptr r_tickptr;
3452 r_tickptr = tcg_temp_new_ptr();
3453 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3454 offsetof(CPUState, tick));
3455 gen_helper_tick_set_count(r_tickptr,
3456 cpu_tmp0);
3457 tcg_temp_free_ptr(r_tickptr);
3459 break;
3460 case 5: // tba
3461 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3462 break;
3463 case 6: // pstate
3464 save_state(dc, cpu_cond);
3465 gen_helper_wrpstate(cpu_tmp0);
3466 gen_op_next_insn();
3467 tcg_gen_exit_tb(0);
3468 dc->is_br = 1;
3469 break;
3470 case 7: // tl
3471 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3472 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3473 offsetof(CPUSPARCState, tl));
3474 break;
3475 case 8: // pil
3476 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3477 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3478 offsetof(CPUSPARCState,
3479 psrpil));
3480 break;
3481 case 9: // cwp
3482 gen_helper_wrcwp(cpu_tmp0);
3483 break;
3484 case 10: // cansave
3485 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3486 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3487 offsetof(CPUSPARCState,
3488 cansave));
3489 break;
3490 case 11: // canrestore
3491 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3492 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3493 offsetof(CPUSPARCState,
3494 canrestore));
3495 break;
3496 case 12: // cleanwin
3497 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3498 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3499 offsetof(CPUSPARCState,
3500 cleanwin));
3501 break;
3502 case 13: // otherwin
3503 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3504 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3505 offsetof(CPUSPARCState,
3506 otherwin));
3507 break;
3508 case 14: // wstate
3509 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3510 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3511 offsetof(CPUSPARCState,
3512 wstate));
3513 break;
3514 case 16: // UA2005 gl
3515 CHECK_IU_FEATURE(dc, GL);
3516 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3517 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3518 offsetof(CPUSPARCState, gl));
3519 break;
3520 case 26: // UA2005 strand status
3521 CHECK_IU_FEATURE(dc, HYPV);
3522 if (!hypervisor(dc))
3523 goto priv_insn;
3524 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3525 break;
3526 default:
3527 goto illegal_insn;
3529 #else
3530 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3531 if (dc->def->nwindows != 32)
3532 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3533 (1 << dc->def->nwindows) - 1);
3534 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3535 #endif
3537 break;
3538 case 0x33: /* wrtbr, UA2005 wrhpr */
3540 #ifndef TARGET_SPARC64
3541 if (!supervisor(dc))
3542 goto priv_insn;
3543 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3544 #else
3545 CHECK_IU_FEATURE(dc, HYPV);
3546 if (!hypervisor(dc))
3547 goto priv_insn;
3548 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3549 switch (rd) {
3550 case 0: // hpstate
3551 // XXX gen_op_wrhpstate();
3552 save_state(dc, cpu_cond);
3553 gen_op_next_insn();
3554 tcg_gen_exit_tb(0);
3555 dc->is_br = 1;
3556 break;
3557 case 1: // htstate
3558 // XXX gen_op_wrhtstate();
3559 break;
3560 case 3: // hintp
3561 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3562 break;
3563 case 5: // htba
3564 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3565 break;
3566 case 31: // hstick_cmpr
3568 TCGv_ptr r_tickptr;
3570 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3571 r_tickptr = tcg_temp_new_ptr();
3572 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3573 offsetof(CPUState, hstick));
3574 gen_helper_tick_set_limit(r_tickptr,
3575 cpu_hstick_cmpr);
3576 tcg_temp_free_ptr(r_tickptr);
3578 break;
3579 case 6: // hver readonly
3580 default:
3581 goto illegal_insn;
3583 #endif
3585 break;
3586 #endif
3587 #ifdef TARGET_SPARC64
3588 case 0x2c: /* V9 movcc */
3590 int cc = GET_FIELD_SP(insn, 11, 12);
3591 int cond = GET_FIELD_SP(insn, 14, 17);
3592 TCGv r_cond;
3593 int l1;
3595 r_cond = tcg_temp_new();
3596 if (insn & (1 << 18)) {
3597 if (cc == 0)
3598 gen_cond(r_cond, 0, cond, dc);
3599 else if (cc == 2)
3600 gen_cond(r_cond, 1, cond, dc);
3601 else
3602 goto illegal_insn;
3603 } else {
3604 gen_fcond(r_cond, cc, cond);
3607 l1 = gen_new_label();
3609 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3610 if (IS_IMM) { /* immediate */
3611 TCGv r_const;
3613 simm = GET_FIELD_SPs(insn, 0, 10);
3614 r_const = tcg_const_tl(simm);
3615 gen_movl_TN_reg(rd, r_const);
3616 tcg_temp_free(r_const);
3617 } else {
3618 rs2 = GET_FIELD_SP(insn, 0, 4);
3619 gen_movl_reg_TN(rs2, cpu_tmp0);
3620 gen_movl_TN_reg(rd, cpu_tmp0);
3622 gen_set_label(l1);
3623 tcg_temp_free(r_cond);
3624 break;
3626 case 0x2d: /* V9 sdivx */
3627 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3628 gen_movl_TN_reg(rd, cpu_dst);
3629 break;
3630 case 0x2e: /* V9 popc */
3632 cpu_src2 = get_src2(insn, cpu_src2);
3633 gen_helper_popc(cpu_dst, cpu_src2);
3634 gen_movl_TN_reg(rd, cpu_dst);
3636 case 0x2f: /* V9 movr */
3638 int cond = GET_FIELD_SP(insn, 10, 12);
3639 int l1;
3641 cpu_src1 = get_src1(insn, cpu_src1);
3643 l1 = gen_new_label();
3645 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3646 cpu_src1, 0, l1);
3647 if (IS_IMM) { /* immediate */
3648 TCGv r_const;
3650 simm = GET_FIELD_SPs(insn, 0, 9);
3651 r_const = tcg_const_tl(simm);
3652 gen_movl_TN_reg(rd, r_const);
3653 tcg_temp_free(r_const);
3654 } else {
3655 rs2 = GET_FIELD_SP(insn, 0, 4);
3656 gen_movl_reg_TN(rs2, cpu_tmp0);
3657 gen_movl_TN_reg(rd, cpu_tmp0);
3659 gen_set_label(l1);
3660 break;
3662 #endif
3663 default:
3664 goto illegal_insn;
3667 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3668 #ifdef TARGET_SPARC64
3669 int opf = GET_FIELD_SP(insn, 5, 13);
3670 rs1 = GET_FIELD(insn, 13, 17);
3671 rs2 = GET_FIELD(insn, 27, 31);
3672 if (gen_trap_ifnofpu(dc, cpu_cond))
3673 goto jmp_insn;
3675 switch (opf) {
3676 case 0x000: /* VIS I edge8cc */
3677 case 0x001: /* VIS II edge8n */
3678 case 0x002: /* VIS I edge8lcc */
3679 case 0x003: /* VIS II edge8ln */
3680 case 0x004: /* VIS I edge16cc */
3681 case 0x005: /* VIS II edge16n */
3682 case 0x006: /* VIS I edge16lcc */
3683 case 0x007: /* VIS II edge16ln */
3684 case 0x008: /* VIS I edge32cc */
3685 case 0x009: /* VIS II edge32n */
3686 case 0x00a: /* VIS I edge32lcc */
3687 case 0x00b: /* VIS II edge32ln */
3688 // XXX
3689 goto illegal_insn;
3690 case 0x010: /* VIS I array8 */
3691 CHECK_FPU_FEATURE(dc, VIS1);
3692 cpu_src1 = get_src1(insn, cpu_src1);
3693 gen_movl_reg_TN(rs2, cpu_src2);
3694 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3695 gen_movl_TN_reg(rd, cpu_dst);
3696 break;
3697 case 0x012: /* VIS I array16 */
3698 CHECK_FPU_FEATURE(dc, VIS1);
3699 cpu_src1 = get_src1(insn, cpu_src1);
3700 gen_movl_reg_TN(rs2, cpu_src2);
3701 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3702 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3703 gen_movl_TN_reg(rd, cpu_dst);
3704 break;
3705 case 0x014: /* VIS I array32 */
3706 CHECK_FPU_FEATURE(dc, VIS1);
3707 cpu_src1 = get_src1(insn, cpu_src1);
3708 gen_movl_reg_TN(rs2, cpu_src2);
3709 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3710 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3711 gen_movl_TN_reg(rd, cpu_dst);
3712 break;
3713 case 0x018: /* VIS I alignaddr */
3714 CHECK_FPU_FEATURE(dc, VIS1);
3715 cpu_src1 = get_src1(insn, cpu_src1);
3716 gen_movl_reg_TN(rs2, cpu_src2);
3717 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3718 gen_movl_TN_reg(rd, cpu_dst);
3719 break;
3720 case 0x019: /* VIS II bmask */
3721 case 0x01a: /* VIS I alignaddrl */
3722 // XXX
3723 goto illegal_insn;
3724 case 0x020: /* VIS I fcmple16 */
3725 CHECK_FPU_FEATURE(dc, VIS1);
3726 gen_op_load_fpr_DT0(DFPREG(rs1));
3727 gen_op_load_fpr_DT1(DFPREG(rs2));
3728 gen_helper_fcmple16();
3729 gen_op_store_DT0_fpr(DFPREG(rd));
3730 break;
3731 case 0x022: /* VIS I fcmpne16 */
3732 CHECK_FPU_FEATURE(dc, VIS1);
3733 gen_op_load_fpr_DT0(DFPREG(rs1));
3734 gen_op_load_fpr_DT1(DFPREG(rs2));
3735 gen_helper_fcmpne16();
3736 gen_op_store_DT0_fpr(DFPREG(rd));
3737 break;
3738 case 0x024: /* VIS I fcmple32 */
3739 CHECK_FPU_FEATURE(dc, VIS1);
3740 gen_op_load_fpr_DT0(DFPREG(rs1));
3741 gen_op_load_fpr_DT1(DFPREG(rs2));
3742 gen_helper_fcmple32();
3743 gen_op_store_DT0_fpr(DFPREG(rd));
3744 break;
3745 case 0x026: /* VIS I fcmpne32 */
3746 CHECK_FPU_FEATURE(dc, VIS1);
3747 gen_op_load_fpr_DT0(DFPREG(rs1));
3748 gen_op_load_fpr_DT1(DFPREG(rs2));
3749 gen_helper_fcmpne32();
3750 gen_op_store_DT0_fpr(DFPREG(rd));
3751 break;
3752 case 0x028: /* VIS I fcmpgt16 */
3753 CHECK_FPU_FEATURE(dc, VIS1);
3754 gen_op_load_fpr_DT0(DFPREG(rs1));
3755 gen_op_load_fpr_DT1(DFPREG(rs2));
3756 gen_helper_fcmpgt16();
3757 gen_op_store_DT0_fpr(DFPREG(rd));
3758 break;
3759 case 0x02a: /* VIS I fcmpeq16 */
3760 CHECK_FPU_FEATURE(dc, VIS1);
3761 gen_op_load_fpr_DT0(DFPREG(rs1));
3762 gen_op_load_fpr_DT1(DFPREG(rs2));
3763 gen_helper_fcmpeq16();
3764 gen_op_store_DT0_fpr(DFPREG(rd));
3765 break;
3766 case 0x02c: /* VIS I fcmpgt32 */
3767 CHECK_FPU_FEATURE(dc, VIS1);
3768 gen_op_load_fpr_DT0(DFPREG(rs1));
3769 gen_op_load_fpr_DT1(DFPREG(rs2));
3770 gen_helper_fcmpgt32();
3771 gen_op_store_DT0_fpr(DFPREG(rd));
3772 break;
3773 case 0x02e: /* VIS I fcmpeq32 */
3774 CHECK_FPU_FEATURE(dc, VIS1);
3775 gen_op_load_fpr_DT0(DFPREG(rs1));
3776 gen_op_load_fpr_DT1(DFPREG(rs2));
3777 gen_helper_fcmpeq32();
3778 gen_op_store_DT0_fpr(DFPREG(rd));
3779 break;
3780 case 0x031: /* VIS I fmul8x16 */
3781 CHECK_FPU_FEATURE(dc, VIS1);
3782 gen_op_load_fpr_DT0(DFPREG(rs1));
3783 gen_op_load_fpr_DT1(DFPREG(rs2));
3784 gen_helper_fmul8x16();
3785 gen_op_store_DT0_fpr(DFPREG(rd));
3786 break;
3787 case 0x033: /* VIS I fmul8x16au */
3788 CHECK_FPU_FEATURE(dc, VIS1);
3789 gen_op_load_fpr_DT0(DFPREG(rs1));
3790 gen_op_load_fpr_DT1(DFPREG(rs2));
3791 gen_helper_fmul8x16au();
3792 gen_op_store_DT0_fpr(DFPREG(rd));
3793 break;
3794 case 0x035: /* VIS I fmul8x16al */
3795 CHECK_FPU_FEATURE(dc, VIS1);
3796 gen_op_load_fpr_DT0(DFPREG(rs1));
3797 gen_op_load_fpr_DT1(DFPREG(rs2));
3798 gen_helper_fmul8x16al();
3799 gen_op_store_DT0_fpr(DFPREG(rd));
3800 break;
3801 case 0x036: /* VIS I fmul8sux16 */
3802 CHECK_FPU_FEATURE(dc, VIS1);
3803 gen_op_load_fpr_DT0(DFPREG(rs1));
3804 gen_op_load_fpr_DT1(DFPREG(rs2));
3805 gen_helper_fmul8sux16();
3806 gen_op_store_DT0_fpr(DFPREG(rd));
3807 break;
3808 case 0x037: /* VIS I fmul8ulx16 */
3809 CHECK_FPU_FEATURE(dc, VIS1);
3810 gen_op_load_fpr_DT0(DFPREG(rs1));
3811 gen_op_load_fpr_DT1(DFPREG(rs2));
3812 gen_helper_fmul8ulx16();
3813 gen_op_store_DT0_fpr(DFPREG(rd));
3814 break;
3815 case 0x038: /* VIS I fmuld8sux16 */
3816 CHECK_FPU_FEATURE(dc, VIS1);
3817 gen_op_load_fpr_DT0(DFPREG(rs1));
3818 gen_op_load_fpr_DT1(DFPREG(rs2));
3819 gen_helper_fmuld8sux16();
3820 gen_op_store_DT0_fpr(DFPREG(rd));
3821 break;
3822 case 0x039: /* VIS I fmuld8ulx16 */
3823 CHECK_FPU_FEATURE(dc, VIS1);
3824 gen_op_load_fpr_DT0(DFPREG(rs1));
3825 gen_op_load_fpr_DT1(DFPREG(rs2));
3826 gen_helper_fmuld8ulx16();
3827 gen_op_store_DT0_fpr(DFPREG(rd));
3828 break;
3829 case 0x03a: /* VIS I fpack32 */
3830 case 0x03b: /* VIS I fpack16 */
3831 case 0x03d: /* VIS I fpackfix */
3832 case 0x03e: /* VIS I pdist */
3833 // XXX
3834 goto illegal_insn;
3835 case 0x048: /* VIS I faligndata */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 gen_op_load_fpr_DT0(DFPREG(rs1));
3838 gen_op_load_fpr_DT1(DFPREG(rs2));
3839 gen_helper_faligndata();
3840 gen_op_store_DT0_fpr(DFPREG(rd));
3841 break;
3842 case 0x04b: /* VIS I fpmerge */
3843 CHECK_FPU_FEATURE(dc, VIS1);
3844 gen_op_load_fpr_DT0(DFPREG(rs1));
3845 gen_op_load_fpr_DT1(DFPREG(rs2));
3846 gen_helper_fpmerge();
3847 gen_op_store_DT0_fpr(DFPREG(rd));
3848 break;
3849 case 0x04c: /* VIS II bshuffle */
3850 // XXX
3851 goto illegal_insn;
3852 case 0x04d: /* VIS I fexpand */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 gen_op_load_fpr_DT0(DFPREG(rs1));
3855 gen_op_load_fpr_DT1(DFPREG(rs2));
3856 gen_helper_fexpand();
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3858 break;
3859 case 0x050: /* VIS I fpadd16 */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 gen_op_load_fpr_DT0(DFPREG(rs1));
3862 gen_op_load_fpr_DT1(DFPREG(rs2));
3863 gen_helper_fpadd16();
3864 gen_op_store_DT0_fpr(DFPREG(rd));
3865 break;
3866 case 0x051: /* VIS I fpadd16s */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 gen_helper_fpadd16s(cpu_fpr[rd],
3869 cpu_fpr[rs1], cpu_fpr[rs2]);
3870 break;
3871 case 0x052: /* VIS I fpadd32 */
3872 CHECK_FPU_FEATURE(dc, VIS1);
3873 gen_op_load_fpr_DT0(DFPREG(rs1));
3874 gen_op_load_fpr_DT1(DFPREG(rs2));
3875 gen_helper_fpadd32();
3876 gen_op_store_DT0_fpr(DFPREG(rd));
3877 break;
3878 case 0x053: /* VIS I fpadd32s */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 gen_helper_fpadd32s(cpu_fpr[rd],
3881 cpu_fpr[rs1], cpu_fpr[rs2]);
3882 break;
3883 case 0x054: /* VIS I fpsub16 */
3884 CHECK_FPU_FEATURE(dc, VIS1);
3885 gen_op_load_fpr_DT0(DFPREG(rs1));
3886 gen_op_load_fpr_DT1(DFPREG(rs2));
3887 gen_helper_fpsub16();
3888 gen_op_store_DT0_fpr(DFPREG(rd));
3889 break;
3890 case 0x055: /* VIS I fpsub16s */
3891 CHECK_FPU_FEATURE(dc, VIS1);
3892 gen_helper_fpsub16s(cpu_fpr[rd],
3893 cpu_fpr[rs1], cpu_fpr[rs2]);
3894 break;
3895 case 0x056: /* VIS I fpsub32 */
3896 CHECK_FPU_FEATURE(dc, VIS1);
3897 gen_op_load_fpr_DT0(DFPREG(rs1));
3898 gen_op_load_fpr_DT1(DFPREG(rs2));
3899 gen_helper_fpsub32();
3900 gen_op_store_DT0_fpr(DFPREG(rd));
3901 break;
3902 case 0x057: /* VIS I fpsub32s */
3903 CHECK_FPU_FEATURE(dc, VIS1);
3904 gen_helper_fpsub32s(cpu_fpr[rd],
3905 cpu_fpr[rs1], cpu_fpr[rs2]);
3906 break;
3907 case 0x060: /* VIS I fzero */
3908 CHECK_FPU_FEATURE(dc, VIS1);
3909 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3910 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3911 break;
3912 case 0x061: /* VIS I fzeros */
3913 CHECK_FPU_FEATURE(dc, VIS1);
3914 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3915 break;
3916 case 0x062: /* VIS I fnor */
3917 CHECK_FPU_FEATURE(dc, VIS1);
3918 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3919 cpu_fpr[DFPREG(rs2)]);
3920 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3921 cpu_fpr[DFPREG(rs2) + 1]);
3922 break;
3923 case 0x063: /* VIS I fnors */
3924 CHECK_FPU_FEATURE(dc, VIS1);
3925 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3926 break;
3927 case 0x064: /* VIS I fandnot2 */
3928 CHECK_FPU_FEATURE(dc, VIS1);
3929 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3930 cpu_fpr[DFPREG(rs2)]);
3931 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3932 cpu_fpr[DFPREG(rs1) + 1],
3933 cpu_fpr[DFPREG(rs2) + 1]);
3934 break;
3935 case 0x065: /* VIS I fandnot2s */
3936 CHECK_FPU_FEATURE(dc, VIS1);
3937 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3938 break;
3939 case 0x066: /* VIS I fnot2 */
3940 CHECK_FPU_FEATURE(dc, VIS1);
3941 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3942 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3943 cpu_fpr[DFPREG(rs2) + 1]);
3944 break;
3945 case 0x067: /* VIS I fnot2s */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3948 break;
3949 case 0x068: /* VIS I fandnot1 */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3952 cpu_fpr[DFPREG(rs1)]);
3953 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3954 cpu_fpr[DFPREG(rs2) + 1],
3955 cpu_fpr[DFPREG(rs1) + 1]);
3956 break;
3957 case 0x069: /* VIS I fandnot1s */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3960 break;
3961 case 0x06a: /* VIS I fnot1 */
3962 CHECK_FPU_FEATURE(dc, VIS1);
3963 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3964 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3965 cpu_fpr[DFPREG(rs1) + 1]);
3966 break;
3967 case 0x06b: /* VIS I fnot1s */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3970 break;
3971 case 0x06c: /* VIS I fxor */
3972 CHECK_FPU_FEATURE(dc, VIS1);
3973 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3974 cpu_fpr[DFPREG(rs2)]);
3975 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3976 cpu_fpr[DFPREG(rs1) + 1],
3977 cpu_fpr[DFPREG(rs2) + 1]);
3978 break;
3979 case 0x06d: /* VIS I fxors */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3982 break;
3983 case 0x06e: /* VIS I fnand */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3986 cpu_fpr[DFPREG(rs2)]);
3987 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3988 cpu_fpr[DFPREG(rs2) + 1]);
3989 break;
3990 case 0x06f: /* VIS I fnands */
3991 CHECK_FPU_FEATURE(dc, VIS1);
3992 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3993 break;
3994 case 0x070: /* VIS I fand */
3995 CHECK_FPU_FEATURE(dc, VIS1);
3996 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3997 cpu_fpr[DFPREG(rs2)]);
3998 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3999 cpu_fpr[DFPREG(rs1) + 1],
4000 cpu_fpr[DFPREG(rs2) + 1]);
4001 break;
4002 case 0x071: /* VIS I fands */
4003 CHECK_FPU_FEATURE(dc, VIS1);
4004 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4005 break;
4006 case 0x072: /* VIS I fxnor */
4007 CHECK_FPU_FEATURE(dc, VIS1);
4008 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4009 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4010 cpu_fpr[DFPREG(rs1)]);
4011 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4012 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4013 cpu_fpr[DFPREG(rs1) + 1]);
4014 break;
4015 case 0x073: /* VIS I fxnors */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4018 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4019 break;
4020 case 0x074: /* VIS I fsrc1 */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4023 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4024 cpu_fpr[DFPREG(rs1) + 1]);
4025 break;
4026 case 0x075: /* VIS I fsrc1s */
4027 CHECK_FPU_FEATURE(dc, VIS1);
4028 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4029 break;
4030 case 0x076: /* VIS I fornot2 */
4031 CHECK_FPU_FEATURE(dc, VIS1);
4032 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4033 cpu_fpr[DFPREG(rs2)]);
4034 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4035 cpu_fpr[DFPREG(rs1) + 1],
4036 cpu_fpr[DFPREG(rs2) + 1]);
4037 break;
4038 case 0x077: /* VIS I fornot2s */
4039 CHECK_FPU_FEATURE(dc, VIS1);
4040 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4041 break;
4042 case 0x078: /* VIS I fsrc2 */
4043 CHECK_FPU_FEATURE(dc, VIS1);
4044 gen_op_load_fpr_DT0(DFPREG(rs2));
4045 gen_op_store_DT0_fpr(DFPREG(rd));
4046 break;
4047 case 0x079: /* VIS I fsrc2s */
4048 CHECK_FPU_FEATURE(dc, VIS1);
4049 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4050 break;
4051 case 0x07a: /* VIS I fornot1 */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4054 cpu_fpr[DFPREG(rs1)]);
4055 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4056 cpu_fpr[DFPREG(rs2) + 1],
4057 cpu_fpr[DFPREG(rs1) + 1]);
4058 break;
4059 case 0x07b: /* VIS I fornot1s */
4060 CHECK_FPU_FEATURE(dc, VIS1);
4061 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4062 break;
4063 case 0x07c: /* VIS I for */
4064 CHECK_FPU_FEATURE(dc, VIS1);
4065 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4066 cpu_fpr[DFPREG(rs2)]);
4067 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4068 cpu_fpr[DFPREG(rs1) + 1],
4069 cpu_fpr[DFPREG(rs2) + 1]);
4070 break;
4071 case 0x07d: /* VIS I fors */
4072 CHECK_FPU_FEATURE(dc, VIS1);
4073 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4074 break;
4075 case 0x07e: /* VIS I fone */
4076 CHECK_FPU_FEATURE(dc, VIS1);
4077 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4078 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4079 break;
4080 case 0x07f: /* VIS I fones */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4083 break;
4084 case 0x080: /* VIS I shutdown */
4085 case 0x081: /* VIS II siam */
4086 // XXX
4087 goto illegal_insn;
4088 default:
4089 goto illegal_insn;
4091 #else
4092 goto ncp_insn;
4093 #endif
4094 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4095 #ifdef TARGET_SPARC64
4096 goto illegal_insn;
4097 #else
4098 goto ncp_insn;
4099 #endif
4100 #ifdef TARGET_SPARC64
4101 } else if (xop == 0x39) { /* V9 return */
4102 TCGv_i32 r_const;
4104 save_state(dc, cpu_cond);
4105 cpu_src1 = get_src1(insn, cpu_src1);
4106 if (IS_IMM) { /* immediate */
4107 simm = GET_FIELDs(insn, 19, 31);
4108 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4109 } else { /* register */
4110 rs2 = GET_FIELD(insn, 27, 31);
4111 if (rs2) {
4112 gen_movl_reg_TN(rs2, cpu_src2);
4113 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4114 } else
4115 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4117 gen_helper_restore();
4118 gen_mov_pc_npc(dc, cpu_cond);
4119 r_const = tcg_const_i32(3);
4120 gen_helper_check_align(cpu_dst, r_const);
4121 tcg_temp_free_i32(r_const);
4122 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4123 dc->npc = DYNAMIC_PC;
4124 goto jmp_insn;
4125 #endif
4126 } else {
4127 cpu_src1 = get_src1(insn, cpu_src1);
4128 if (IS_IMM) { /* immediate */
4129 simm = GET_FIELDs(insn, 19, 31);
4130 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4131 } else { /* register */
4132 rs2 = GET_FIELD(insn, 27, 31);
4133 if (rs2) {
4134 gen_movl_reg_TN(rs2, cpu_src2);
4135 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4136 } else
4137 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4139 switch (xop) {
4140 case 0x38: /* jmpl */
4142 TCGv r_pc;
4143 TCGv_i32 r_const;
4145 r_pc = tcg_const_tl(dc->pc);
4146 gen_movl_TN_reg(rd, r_pc);
4147 tcg_temp_free(r_pc);
4148 gen_mov_pc_npc(dc, cpu_cond);
4149 r_const = tcg_const_i32(3);
4150 gen_helper_check_align(cpu_dst, r_const);
4151 tcg_temp_free_i32(r_const);
4152 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4153 dc->npc = DYNAMIC_PC;
4155 goto jmp_insn;
4156 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4157 case 0x39: /* rett, V9 return */
4159 TCGv_i32 r_const;
4161 if (!supervisor(dc))
4162 goto priv_insn;
4163 gen_mov_pc_npc(dc, cpu_cond);
4164 r_const = tcg_const_i32(3);
4165 gen_helper_check_align(cpu_dst, r_const);
4166 tcg_temp_free_i32(r_const);
4167 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4168 dc->npc = DYNAMIC_PC;
4169 gen_helper_rett();
4171 goto jmp_insn;
4172 #endif
4173 case 0x3b: /* flush */
4174 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4175 goto unimp_flush;
4176 gen_helper_flush(cpu_dst);
4177 break;
4178 case 0x3c: /* save */
4179 save_state(dc, cpu_cond);
4180 gen_helper_save();
4181 gen_movl_TN_reg(rd, cpu_dst);
4182 break;
4183 case 0x3d: /* restore */
4184 save_state(dc, cpu_cond);
4185 gen_helper_restore();
4186 gen_movl_TN_reg(rd, cpu_dst);
4187 break;
4188 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4189 case 0x3e: /* V9 done/retry */
4191 switch (rd) {
4192 case 0:
4193 if (!supervisor(dc))
4194 goto priv_insn;
4195 dc->npc = DYNAMIC_PC;
4196 dc->pc = DYNAMIC_PC;
4197 gen_helper_done();
4198 goto jmp_insn;
4199 case 1:
4200 if (!supervisor(dc))
4201 goto priv_insn;
4202 dc->npc = DYNAMIC_PC;
4203 dc->pc = DYNAMIC_PC;
4204 gen_helper_retry();
4205 goto jmp_insn;
4206 default:
4207 goto illegal_insn;
4210 break;
4211 #endif
4212 default:
4213 goto illegal_insn;
4216 break;
4218 break;
4219 case 3: /* load/store instructions */
4221 unsigned int xop = GET_FIELD(insn, 7, 12);
4223 cpu_src1 = get_src1(insn, cpu_src1);
4224 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4225 rs2 = GET_FIELD(insn, 27, 31);
4226 gen_movl_reg_TN(rs2, cpu_src2);
4227 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4228 } else if (IS_IMM) { /* immediate */
4229 simm = GET_FIELDs(insn, 19, 31);
4230 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4231 } else { /* register */
4232 rs2 = GET_FIELD(insn, 27, 31);
4233 if (rs2 != 0) {
4234 gen_movl_reg_TN(rs2, cpu_src2);
4235 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4236 } else
4237 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4239 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4240 (xop > 0x17 && xop <= 0x1d ) ||
4241 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4242 switch (xop) {
4243 case 0x0: /* ld, V9 lduw, load unsigned word */
4244 gen_address_mask(dc, cpu_addr);
4245 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4246 break;
4247 case 0x1: /* ldub, load unsigned byte */
4248 gen_address_mask(dc, cpu_addr);
4249 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4250 break;
4251 case 0x2: /* lduh, load unsigned halfword */
4252 gen_address_mask(dc, cpu_addr);
4253 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4254 break;
4255 case 0x3: /* ldd, load double word */
4256 if (rd & 1)
4257 goto illegal_insn;
4258 else {
4259 TCGv_i32 r_const;
4261 save_state(dc, cpu_cond);
4262 r_const = tcg_const_i32(7);
4263 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4264 tcg_temp_free_i32(r_const);
4265 gen_address_mask(dc, cpu_addr);
4266 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4267 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4268 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4269 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4270 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4271 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4272 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4274 break;
4275 case 0x9: /* ldsb, load signed byte */
4276 gen_address_mask(dc, cpu_addr);
4277 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4278 break;
4279 case 0xa: /* ldsh, load signed halfword */
4280 gen_address_mask(dc, cpu_addr);
4281 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4282 break;
4283 case 0xd: /* ldstub -- XXX: should be atomically */
4285 TCGv r_const;
4287 gen_address_mask(dc, cpu_addr);
4288 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4289 r_const = tcg_const_tl(0xff);
4290 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4291 tcg_temp_free(r_const);
4293 break;
4294 case 0x0f: /* swap, swap register with memory. Also
4295 atomically */
4296 CHECK_IU_FEATURE(dc, SWAP);
4297 gen_movl_reg_TN(rd, cpu_val);
4298 gen_address_mask(dc, cpu_addr);
4299 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4300 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4301 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4302 break;
4303 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4304 case 0x10: /* lda, V9 lduwa, load word alternate */
4305 #ifndef TARGET_SPARC64
4306 if (IS_IMM)
4307 goto illegal_insn;
4308 if (!supervisor(dc))
4309 goto priv_insn;
4310 #endif
4311 save_state(dc, cpu_cond);
4312 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4313 break;
4314 case 0x11: /* lduba, load unsigned byte alternate */
4315 #ifndef TARGET_SPARC64
4316 if (IS_IMM)
4317 goto illegal_insn;
4318 if (!supervisor(dc))
4319 goto priv_insn;
4320 #endif
4321 save_state(dc, cpu_cond);
4322 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4323 break;
4324 case 0x12: /* lduha, load unsigned halfword alternate */
4325 #ifndef TARGET_SPARC64
4326 if (IS_IMM)
4327 goto illegal_insn;
4328 if (!supervisor(dc))
4329 goto priv_insn;
4330 #endif
4331 save_state(dc, cpu_cond);
4332 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4333 break;
4334 case 0x13: /* ldda, load double word alternate */
4335 #ifndef TARGET_SPARC64
4336 if (IS_IMM)
4337 goto illegal_insn;
4338 if (!supervisor(dc))
4339 goto priv_insn;
4340 #endif
4341 if (rd & 1)
4342 goto illegal_insn;
4343 save_state(dc, cpu_cond);
4344 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4345 goto skip_move;
4346 case 0x19: /* ldsba, load signed byte alternate */
4347 #ifndef TARGET_SPARC64
4348 if (IS_IMM)
4349 goto illegal_insn;
4350 if (!supervisor(dc))
4351 goto priv_insn;
4352 #endif
4353 save_state(dc, cpu_cond);
4354 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4355 break;
4356 case 0x1a: /* ldsha, load signed halfword alternate */
4357 #ifndef TARGET_SPARC64
4358 if (IS_IMM)
4359 goto illegal_insn;
4360 if (!supervisor(dc))
4361 goto priv_insn;
4362 #endif
4363 save_state(dc, cpu_cond);
4364 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4365 break;
4366 case 0x1d: /* ldstuba -- XXX: should be atomically */
4367 #ifndef TARGET_SPARC64
4368 if (IS_IMM)
4369 goto illegal_insn;
4370 if (!supervisor(dc))
4371 goto priv_insn;
4372 #endif
4373 save_state(dc, cpu_cond);
4374 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4375 break;
4376 case 0x1f: /* swapa, swap reg with alt. memory. Also
4377 atomically */
4378 CHECK_IU_FEATURE(dc, SWAP);
4379 #ifndef TARGET_SPARC64
4380 if (IS_IMM)
4381 goto illegal_insn;
4382 if (!supervisor(dc))
4383 goto priv_insn;
4384 #endif
4385 save_state(dc, cpu_cond);
4386 gen_movl_reg_TN(rd, cpu_val);
4387 gen_swap_asi(cpu_val, cpu_addr, insn);
4388 break;
4390 #ifndef TARGET_SPARC64
4391 case 0x30: /* ldc */
4392 case 0x31: /* ldcsr */
4393 case 0x33: /* lddc */
4394 goto ncp_insn;
4395 #endif
4396 #endif
4397 #ifdef TARGET_SPARC64
4398 case 0x08: /* V9 ldsw */
4399 gen_address_mask(dc, cpu_addr);
4400 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4401 break;
4402 case 0x0b: /* V9 ldx */
4403 gen_address_mask(dc, cpu_addr);
4404 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4405 break;
4406 case 0x18: /* V9 ldswa */
4407 save_state(dc, cpu_cond);
4408 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4409 break;
4410 case 0x1b: /* V9 ldxa */
4411 save_state(dc, cpu_cond);
4412 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4413 break;
4414 case 0x2d: /* V9 prefetch, no effect */
4415 goto skip_move;
4416 case 0x30: /* V9 ldfa */
4417 save_state(dc, cpu_cond);
4418 gen_ldf_asi(cpu_addr, insn, 4, rd);
4419 goto skip_move;
4420 case 0x33: /* V9 lddfa */
4421 save_state(dc, cpu_cond);
4422 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4423 goto skip_move;
4424 case 0x3d: /* V9 prefetcha, no effect */
4425 goto skip_move;
4426 case 0x32: /* V9 ldqfa */
4427 CHECK_FPU_FEATURE(dc, FLOAT128);
4428 save_state(dc, cpu_cond);
4429 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4430 goto skip_move;
4431 #endif
4432 default:
4433 goto illegal_insn;
4435 gen_movl_TN_reg(rd, cpu_val);
4436 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4437 skip_move: ;
4438 #endif
4439 } else if (xop >= 0x20 && xop < 0x24) {
4440 if (gen_trap_ifnofpu(dc, cpu_cond))
4441 goto jmp_insn;
4442 save_state(dc, cpu_cond);
4443 switch (xop) {
4444 case 0x20: /* ldf, load fpreg */
4445 gen_address_mask(dc, cpu_addr);
4446 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4447 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4448 break;
4449 case 0x21: /* ldfsr, V9 ldxfsr */
4450 #ifdef TARGET_SPARC64
4451 gen_address_mask(dc, cpu_addr);
4452 if (rd == 1) {
4453 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4454 gen_helper_ldxfsr(cpu_tmp64);
4455 } else
4456 #else
4458 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4459 gen_helper_ldfsr(cpu_tmp32);
4461 #endif
4462 break;
4463 case 0x22: /* ldqf, load quad fpreg */
4465 TCGv_i32 r_const;
4467 CHECK_FPU_FEATURE(dc, FLOAT128);
4468 r_const = tcg_const_i32(dc->mem_idx);
4469 gen_helper_ldqf(cpu_addr, r_const);
4470 tcg_temp_free_i32(r_const);
4471 gen_op_store_QT0_fpr(QFPREG(rd));
4473 break;
4474 case 0x23: /* lddf, load double fpreg */
4476 TCGv_i32 r_const;
4478 r_const = tcg_const_i32(dc->mem_idx);
4479 gen_helper_lddf(cpu_addr, r_const);
4480 tcg_temp_free_i32(r_const);
4481 gen_op_store_DT0_fpr(DFPREG(rd));
4483 break;
4484 default:
4485 goto illegal_insn;
4487 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4488 xop == 0xe || xop == 0x1e) {
4489 gen_movl_reg_TN(rd, cpu_val);
4490 switch (xop) {
4491 case 0x4: /* st, store word */
4492 gen_address_mask(dc, cpu_addr);
4493 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4494 break;
4495 case 0x5: /* stb, store byte */
4496 gen_address_mask(dc, cpu_addr);
4497 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4498 break;
4499 case 0x6: /* sth, store halfword */
4500 gen_address_mask(dc, cpu_addr);
4501 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4502 break;
4503 case 0x7: /* std, store double word */
4504 if (rd & 1)
4505 goto illegal_insn;
4506 else {
4507 TCGv_i32 r_const;
4509 save_state(dc, cpu_cond);
4510 gen_address_mask(dc, cpu_addr);
4511 r_const = tcg_const_i32(7);
4512 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4513 tcg_temp_free_i32(r_const);
4514 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4515 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4516 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4518 break;
4519 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4520 case 0x14: /* sta, V9 stwa, store word alternate */
4521 #ifndef TARGET_SPARC64
4522 if (IS_IMM)
4523 goto illegal_insn;
4524 if (!supervisor(dc))
4525 goto priv_insn;
4526 #endif
4527 save_state(dc, cpu_cond);
4528 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4529 break;
4530 case 0x15: /* stba, store byte alternate */
4531 #ifndef TARGET_SPARC64
4532 if (IS_IMM)
4533 goto illegal_insn;
4534 if (!supervisor(dc))
4535 goto priv_insn;
4536 #endif
4537 save_state(dc, cpu_cond);
4538 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4539 break;
4540 case 0x16: /* stha, store halfword alternate */
4541 #ifndef TARGET_SPARC64
4542 if (IS_IMM)
4543 goto illegal_insn;
4544 if (!supervisor(dc))
4545 goto priv_insn;
4546 #endif
4547 save_state(dc, cpu_cond);
4548 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4549 break;
4550 case 0x17: /* stda, store double word alternate */
4551 #ifndef TARGET_SPARC64
4552 if (IS_IMM)
4553 goto illegal_insn;
4554 if (!supervisor(dc))
4555 goto priv_insn;
4556 #endif
4557 if (rd & 1)
4558 goto illegal_insn;
4559 else {
4560 save_state(dc, cpu_cond);
4561 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4563 break;
4564 #endif
4565 #ifdef TARGET_SPARC64
4566 case 0x0e: /* V9 stx */
4567 gen_address_mask(dc, cpu_addr);
4568 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4569 break;
4570 case 0x1e: /* V9 stxa */
4571 save_state(dc, cpu_cond);
4572 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4573 break;
4574 #endif
4575 default:
4576 goto illegal_insn;
4578 } else if (xop > 0x23 && xop < 0x28) {
4579 if (gen_trap_ifnofpu(dc, cpu_cond))
4580 goto jmp_insn;
4581 save_state(dc, cpu_cond);
4582 switch (xop) {
4583 case 0x24: /* stf, store fpreg */
4584 gen_address_mask(dc, cpu_addr);
4585 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4586 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4587 break;
4588 case 0x25: /* stfsr, V9 stxfsr */
4589 #ifdef TARGET_SPARC64
4590 gen_address_mask(dc, cpu_addr);
4591 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4592 if (rd == 1)
4593 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4594 else
4595 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4596 #else
4597 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4598 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4599 #endif
4600 break;
4601 case 0x26:
4602 #ifdef TARGET_SPARC64
4603 /* V9 stqf, store quad fpreg */
4605 TCGv_i32 r_const;
4607 CHECK_FPU_FEATURE(dc, FLOAT128);
4608 gen_op_load_fpr_QT0(QFPREG(rd));
4609 r_const = tcg_const_i32(dc->mem_idx);
4610 gen_helper_stqf(cpu_addr, r_const);
4611 tcg_temp_free_i32(r_const);
4613 break;
4614 #else /* !TARGET_SPARC64 */
4615 /* stdfq, store floating point queue */
4616 #if defined(CONFIG_USER_ONLY)
4617 goto illegal_insn;
4618 #else
4619 if (!supervisor(dc))
4620 goto priv_insn;
4621 if (gen_trap_ifnofpu(dc, cpu_cond))
4622 goto jmp_insn;
4623 goto nfq_insn;
4624 #endif
4625 #endif
4626 case 0x27: /* stdf, store double fpreg */
4628 TCGv_i32 r_const;
4630 gen_op_load_fpr_DT0(DFPREG(rd));
4631 r_const = tcg_const_i32(dc->mem_idx);
4632 gen_helper_stdf(cpu_addr, r_const);
4633 tcg_temp_free_i32(r_const);
4635 break;
4636 default:
4637 goto illegal_insn;
4639 } else if (xop > 0x33 && xop < 0x3f) {
4640 save_state(dc, cpu_cond);
4641 switch (xop) {
4642 #ifdef TARGET_SPARC64
4643 case 0x34: /* V9 stfa */
4644 gen_stf_asi(cpu_addr, insn, 4, rd);
4645 break;
4646 case 0x36: /* V9 stqfa */
4648 TCGv_i32 r_const;
4650 CHECK_FPU_FEATURE(dc, FLOAT128);
4651 r_const = tcg_const_i32(7);
4652 gen_helper_check_align(cpu_addr, r_const);
4653 tcg_temp_free_i32(r_const);
4654 gen_op_load_fpr_QT0(QFPREG(rd));
4655 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4657 break;
4658 case 0x37: /* V9 stdfa */
4659 gen_op_load_fpr_DT0(DFPREG(rd));
4660 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4661 break;
4662 case 0x3c: /* V9 casa */
4663 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4664 gen_movl_TN_reg(rd, cpu_val);
4665 break;
4666 case 0x3e: /* V9 casxa */
4667 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4668 gen_movl_TN_reg(rd, cpu_val);
4669 break;
4670 #else
4671 case 0x34: /* stc */
4672 case 0x35: /* stcsr */
4673 case 0x36: /* stdcq */
4674 case 0x37: /* stdc */
4675 goto ncp_insn;
4676 #endif
4677 default:
4678 goto illegal_insn;
4680 } else
4681 goto illegal_insn;
4683 break;
4685 /* default case for non jump instructions */
4686 if (dc->npc == DYNAMIC_PC) {
4687 dc->pc = DYNAMIC_PC;
4688 gen_op_next_insn();
4689 } else if (dc->npc == JUMP_PC) {
4690 /* we can do a static jump */
4691 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4692 dc->is_br = 1;
4693 } else {
4694 dc->pc = dc->npc;
4695 dc->npc = dc->npc + 4;
4697 jmp_insn:
4698 return;
4699 illegal_insn:
4701 TCGv_i32 r_const;
4703 save_state(dc, cpu_cond);
4704 r_const = tcg_const_i32(TT_ILL_INSN);
4705 gen_helper_raise_exception(r_const);
4706 tcg_temp_free_i32(r_const);
4707 dc->is_br = 1;
4709 return;
4710 unimp_flush:
4712 TCGv_i32 r_const;
4714 save_state(dc, cpu_cond);
4715 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4716 gen_helper_raise_exception(r_const);
4717 tcg_temp_free_i32(r_const);
4718 dc->is_br = 1;
4720 return;
4721 #if !defined(CONFIG_USER_ONLY)
4722 priv_insn:
4724 TCGv_i32 r_const;
4726 save_state(dc, cpu_cond);
4727 r_const = tcg_const_i32(TT_PRIV_INSN);
4728 gen_helper_raise_exception(r_const);
4729 tcg_temp_free_i32(r_const);
4730 dc->is_br = 1;
4732 return;
4733 #endif
4734 nfpu_insn:
4735 save_state(dc, cpu_cond);
4736 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4737 dc->is_br = 1;
4738 return;
4739 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4740 nfq_insn:
4741 save_state(dc, cpu_cond);
4742 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4743 dc->is_br = 1;
4744 return;
4745 #endif
4746 #ifndef TARGET_SPARC64
4747 ncp_insn:
4749 TCGv r_const;
4751 save_state(dc, cpu_cond);
4752 r_const = tcg_const_i32(TT_NCP_INSN);
4753 gen_helper_raise_exception(r_const);
4754 tcg_temp_free(r_const);
4755 dc->is_br = 1;
4757 return;
4758 #endif
4761 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4762 int spc, CPUSPARCState *env)
4764 target_ulong pc_start, last_pc;
4765 uint16_t *gen_opc_end;
4766 DisasContext dc1, *dc = &dc1;
4767 CPUBreakpoint *bp;
4768 int j, lj = -1;
4769 int num_insns;
4770 int max_insns;
4772 memset(dc, 0, sizeof(DisasContext));
4773 dc->tb = tb;
4774 pc_start = tb->pc;
4775 dc->pc = pc_start;
4776 last_pc = dc->pc;
4777 dc->npc = (target_ulong) tb->cs_base;
4778 dc->cc_op = CC_OP_DYNAMIC;
4779 dc->mem_idx = cpu_mmu_index(env);
4780 dc->def = env->def;
4781 if ((dc->def->features & CPU_FEATURE_FLOAT))
4782 dc->fpu_enabled = cpu_fpu_enabled(env);
4783 else
4784 dc->fpu_enabled = 0;
4785 #ifdef TARGET_SPARC64
4786 dc->address_mask_32bit = env->pstate & PS_AM;
4787 #endif
4788 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4790 cpu_tmp0 = tcg_temp_new();
4791 cpu_tmp32 = tcg_temp_new_i32();
4792 cpu_tmp64 = tcg_temp_new_i64();
4794 cpu_dst = tcg_temp_local_new();
4796 // loads and stores
4797 cpu_val = tcg_temp_local_new();
4798 cpu_addr = tcg_temp_local_new();
4800 num_insns = 0;
4801 max_insns = tb->cflags & CF_COUNT_MASK;
4802 if (max_insns == 0)
4803 max_insns = CF_COUNT_MASK;
4804 gen_icount_start();
4805 do {
4806 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4807 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4808 if (bp->pc == dc->pc) {
4809 if (dc->pc != pc_start)
4810 save_state(dc, cpu_cond);
4811 gen_helper_debug();
4812 tcg_gen_exit_tb(0);
4813 dc->is_br = 1;
4814 goto exit_gen_loop;
4818 if (spc) {
4819 qemu_log("Search PC...\n");
4820 j = gen_opc_ptr - gen_opc_buf;
4821 if (lj < j) {
4822 lj++;
4823 while (lj < j)
4824 gen_opc_instr_start[lj++] = 0;
4825 gen_opc_pc[lj] = dc->pc;
4826 gen_opc_npc[lj] = dc->npc;
4827 gen_opc_instr_start[lj] = 1;
4828 gen_opc_icount[lj] = num_insns;
4831 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4832 gen_io_start();
4833 last_pc = dc->pc;
4834 disas_sparc_insn(dc);
4835 num_insns++;
4837 if (dc->is_br)
4838 break;
4839 /* if the next PC is different, we abort now */
4840 if (dc->pc != (last_pc + 4))
4841 break;
4842 /* if we reach a page boundary, we stop generation so that the
4843 PC of a TT_TFAULT exception is always in the right page */
4844 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4845 break;
4846 /* if single step mode, we generate only one instruction and
4847 generate an exception */
4848 if (env->singlestep_enabled || singlestep) {
4849 tcg_gen_movi_tl(cpu_pc, dc->pc);
4850 tcg_gen_exit_tb(0);
4851 break;
4853 } while ((gen_opc_ptr < gen_opc_end) &&
4854 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4855 num_insns < max_insns);
4857 exit_gen_loop:
4858 tcg_temp_free(cpu_addr);
4859 tcg_temp_free(cpu_val);
4860 tcg_temp_free(cpu_dst);
4861 tcg_temp_free_i64(cpu_tmp64);
4862 tcg_temp_free_i32(cpu_tmp32);
4863 tcg_temp_free(cpu_tmp0);
4864 if (tb->cflags & CF_LAST_IO)
4865 gen_io_end();
4866 if (!dc->is_br) {
4867 if (dc->pc != DYNAMIC_PC &&
4868 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4869 /* static PC and NPC: we can use direct chaining */
4870 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4871 } else {
4872 if (dc->pc != DYNAMIC_PC)
4873 tcg_gen_movi_tl(cpu_pc, dc->pc);
4874 save_npc(dc, cpu_cond);
4875 tcg_gen_exit_tb(0);
4878 gen_icount_end(tb, num_insns);
4879 *gen_opc_ptr = INDEX_op_end;
4880 if (spc) {
4881 j = gen_opc_ptr - gen_opc_buf;
4882 lj++;
4883 while (lj <= j)
4884 gen_opc_instr_start[lj++] = 0;
4885 #if 0
4886 log_page_dump();
4887 #endif
4888 gen_opc_jump_pc[0] = dc->jump_pc[0];
4889 gen_opc_jump_pc[1] = dc->jump_pc[1];
4890 } else {
4891 tb->size = last_pc + 4 - pc_start;
4892 tb->icount = num_insns;
4894 #ifdef DEBUG_DISAS
4895 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4896 qemu_log("--------------\n");
4897 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4898 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4899 qemu_log("\n");
4901 #endif
4904 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4906 gen_intermediate_code_internal(tb, 0, env);
4909 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4911 gen_intermediate_code_internal(tb, 1, env);
4914 void gen_intermediate_code_init(CPUSPARCState *env)
4916 unsigned int i;
4917 static int inited;
4918 static const char * const gregnames[8] = {
4919 NULL, // g0 not used
4920 "g1",
4921 "g2",
4922 "g3",
4923 "g4",
4924 "g5",
4925 "g6",
4926 "g7",
4928 static const char * const fregnames[64] = {
4929 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4930 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4931 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4932 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4933 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4934 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4935 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4936 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4939 /* init various static tables */
4940 if (!inited) {
4941 inited = 1;
4943 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4944 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4945 offsetof(CPUState, regwptr),
4946 "regwptr");
4947 #ifdef TARGET_SPARC64
4948 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4949 "xcc");
4950 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4951 "asi");
4952 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4953 "fprs");
4954 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4955 "gsr");
4956 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4957 offsetof(CPUState, tick_cmpr),
4958 "tick_cmpr");
4959 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4960 offsetof(CPUState, stick_cmpr),
4961 "stick_cmpr");
4962 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4963 offsetof(CPUState, hstick_cmpr),
4964 "hstick_cmpr");
4965 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4966 "hintp");
4967 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4968 "htba");
4969 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4970 "hver");
4971 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4972 offsetof(CPUState, ssr), "ssr");
4973 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4974 offsetof(CPUState, version), "ver");
4975 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4976 offsetof(CPUState, softint),
4977 "softint");
4978 #else
4979 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4980 "wim");
4981 #endif
4982 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4983 "cond");
4984 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4985 "cc_src");
4986 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4987 offsetof(CPUState, cc_src2),
4988 "cc_src2");
4989 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4990 "cc_dst");
4991 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
4992 "cc_op");
4993 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4994 "psr");
4995 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4996 "fsr");
4997 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4998 "pc");
4999 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5000 "npc");
5001 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5002 #ifndef CONFIG_USER_ONLY
5003 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5004 "tbr");
5005 #endif
5006 for (i = 1; i < 8; i++)
5007 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5008 offsetof(CPUState, gregs[i]),
5009 gregnames[i]);
5010 for (i = 0; i < TARGET_FPREGS; i++)
5011 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5012 offsetof(CPUState, fpr[i]),
5013 fregnames[i]);
5015 /* register helpers */
5017 #define GEN_HELPER 2
5018 #include "helper.h"
5022 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5023 unsigned long searched_pc, int pc_pos, void *puc)
5025 target_ulong npc;
5026 env->pc = gen_opc_pc[pc_pos];
5027 npc = gen_opc_npc[pc_pos];
5028 if (npc == 1) {
5029 /* dynamic NPC: already stored */
5030 } else if (npc == 2) {
5031 target_ulong t2 = (target_ulong)(unsigned long)puc;
5032 /* jump PC: use T2 and the jump targets of the translation */
5033 if (t2)
5034 env->npc = gen_opc_jump_pc[0];
5035 else
5036 env->npc = gen_opc_jump_pc[1];
5037 } else {
5038 env->npc = npc;