Simplified some dead extended arith code after search and replace.
[qemu/qemu-JZ.git] / target-sparc / translate.c
blob05f23de130a0588f9f6b84bd664f0f57805ed752
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 TODO-list:
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
30 #include <stdarg.h>
31 #include <stdlib.h>
32 #include <stdio.h>
33 #include <string.h>
34 #include <inttypes.h>
36 #include "cpu.h"
37 #include "exec-all.h"
38 #include "disas.h"
39 #include "helper.h"
40 #include "tcg-op.h"
42 #define DEBUG_DISAS
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 /* global register indexes */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_dst, cpu_psr;
50 static TCGv cpu_gregs[8];
51 #ifdef TARGET_SPARC64
52 static TCGv cpu_xcc;
53 #endif
54 /* local register indexes (only used inside old micro ops) */
55 static TCGv cpu_tmp0;
57 typedef struct DisasContext {
58 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
59 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
60 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
61 int is_br;
62 int mem_idx;
63 int fpu_enabled;
64 struct TranslationBlock *tb;
65 } DisasContext;
67 typedef struct sparc_def_t sparc_def_t;
69 struct sparc_def_t {
70 const unsigned char *name;
71 target_ulong iu_version;
72 uint32_t fpu_version;
73 uint32_t mmu_version;
74 uint32_t mmu_bm;
75 uint32_t mmu_ctpr_mask;
76 uint32_t mmu_cxr_mask;
77 uint32_t mmu_sfsr_mask;
78 uint32_t mmu_trcr_mask;
81 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
83 extern FILE *logfile;
84 extern int loglevel;
86 // This function uses non-native bit order
87 #define GET_FIELD(X, FROM, TO) \
88 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90 // This function uses the order in the manuals, i.e. bit 0 is 2^0
91 #define GET_FIELD_SP(X, FROM, TO) \
92 GET_FIELD(X, 31 - (TO), 31 - (FROM))
94 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
95 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
97 #ifdef TARGET_SPARC64
98 #define FFPREG(r) (r)
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101 #else
102 #define FFPREG(r) (r)
103 #define DFPREG(r) (r & 0x1e)
104 #define QFPREG(r) (r & 0x1c)
105 #endif
/*
 * Sign-extend the low 'len' bits of 'x' to a full 32-bit signed value,
 * e.g. sign_extend(0x1fff, 13) == -1.
 *
 * The original implementation did "(x << (32 - len)) >> (32 - len)",
 * which left-shifts a signed int into the sign bit — undefined
 * behavior in C.  This version computes the same results using only
 * unsigned arithmetic.
 */
static int sign_extend(int x, int len)
{
    uint32_t mask, sbit, v;

    if (len <= 0 || len >= 32)
        return x;               /* matches old behavior: no-op shift */
    mask = (1u << len) - 1;
    sbit = 1u << (len - 1);
    v = (uint32_t)x & mask;     /* keep only the field bits */
    return (int)((v ^ sbit) - sbit);
}
113 #define IS_IMM (insn & (1<<13))
115 static void disas_sparc_insn(DisasContext * dc);
117 #ifdef TARGET_SPARC64
118 #define GEN32(func, NAME) \
119 static GenOpFunc * const NAME ## _table [64] = { \
120 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
121 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
122 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
123 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
124 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
125 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
126 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
127 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
128 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
129 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
130 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
131 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
132 }; \
133 static inline void func(int n) \
135 NAME ## _table[n](); \
137 #else
138 #define GEN32(func, NAME) \
139 static GenOpFunc *const NAME ## _table [32] = { \
140 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
141 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
142 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
143 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
144 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
145 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
146 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
147 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
148 }; \
149 static inline void func(int n) \
151 NAME ## _table[n](); \
153 #endif
155 /* floating point registers moves */
156 GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
157 GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
158 GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
159 GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
161 GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
162 GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
163 GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
164 GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
166 #if defined(CONFIG_USER_ONLY)
167 GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
168 GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
169 GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
170 GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
171 #endif
173 /* moves */
174 #ifdef CONFIG_USER_ONLY
175 #define supervisor(dc) 0
176 #ifdef TARGET_SPARC64
177 #define hypervisor(dc) 0
178 #endif
179 #define gen_op_ldst(name) gen_op_##name##_raw()
180 #else
181 #define supervisor(dc) (dc->mem_idx >= 1)
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) (dc->mem_idx == 2)
184 #define OP_LD_TABLE(width) \
185 static GenOpFunc * const gen_op_##width[] = { \
186 &gen_op_##width##_user, \
187 &gen_op_##width##_kernel, \
188 &gen_op_##width##_hypv, \
190 #else
191 #define OP_LD_TABLE(width) \
192 static GenOpFunc * const gen_op_##width[] = { \
193 &gen_op_##width##_user, \
194 &gen_op_##width##_kernel, \
196 #endif
197 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
198 #endif
200 #ifndef CONFIG_USER_ONLY
201 #ifdef __i386__
202 OP_LD_TABLE(std);
203 #endif /* __i386__ */
204 OP_LD_TABLE(stf);
205 OP_LD_TABLE(stdf);
206 OP_LD_TABLE(ldf);
207 OP_LD_TABLE(lddf);
208 #endif
210 #ifdef TARGET_ABI32
211 #define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
212 #else
213 #define ABI32_MASK(addr)
214 #endif
216 static inline void gen_movl_simm_T1(int32_t val)
218 tcg_gen_movi_tl(cpu_T[1], val);
221 static inline void gen_movl_reg_TN(int reg, TCGv tn)
223 if (reg == 0)
224 tcg_gen_movi_tl(tn, 0);
225 else if (reg < 8)
226 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
227 else {
228 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
232 static inline void gen_movl_reg_T0(int reg)
234 gen_movl_reg_TN(reg, cpu_T[0]);
237 static inline void gen_movl_reg_T1(int reg)
239 gen_movl_reg_TN(reg, cpu_T[1]);
242 #ifdef __i386__
243 static inline void gen_movl_reg_T2(int reg)
245 gen_movl_reg_TN(reg, cpu_T[2]);
248 #endif /* __i386__ */
249 static inline void gen_movl_TN_reg(int reg, TCGv tn)
251 if (reg == 0)
252 return;
253 else if (reg < 8)
254 tcg_gen_mov_tl(cpu_gregs[reg], tn);
255 else {
256 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
260 static inline void gen_movl_T0_reg(int reg)
262 gen_movl_TN_reg(reg, cpu_T[0]);
265 static inline void gen_movl_T1_reg(int reg)
267 gen_movl_TN_reg(reg, cpu_T[1]);
270 static inline void gen_op_movl_T0_env(size_t offset)
272 tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
275 static inline void gen_op_movl_env_T0(size_t offset)
277 tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
280 static inline void gen_op_movtl_T0_env(size_t offset)
282 tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
285 static inline void gen_op_movtl_env_T0(size_t offset)
287 tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
290 static inline void gen_op_add_T1_T0(void)
292 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
295 static inline void gen_op_or_T1_T0(void)
297 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
300 static inline void gen_op_xor_T1_T0(void)
302 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
305 static inline void gen_jmp_im(target_ulong pc)
307 tcg_gen_movi_tl(cpu_tmp0, pc);
308 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
311 static inline void gen_movl_npc_im(target_ulong npc)
313 tcg_gen_movi_tl(cpu_tmp0, npc);
314 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
317 static inline void gen_goto_tb(DisasContext *s, int tb_num,
318 target_ulong pc, target_ulong npc)
320 TranslationBlock *tb;
322 tb = s->tb;
323 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
324 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
325 /* jump to same page: we can use a direct jump */
326 tcg_gen_goto_tb(tb_num);
327 gen_jmp_im(pc);
328 gen_movl_npc_im(npc);
329 tcg_gen_exit_tb((long)tb + tb_num);
330 } else {
331 /* jump to another page: currently not optimized */
332 gen_jmp_im(pc);
333 gen_movl_npc_im(npc);
334 tcg_gen_exit_tb(0);
338 // XXX suboptimal
339 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
341 tcg_gen_shri_i32(reg, src, 23);
342 tcg_gen_andi_tl(reg, reg, 0x1);
345 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
347 tcg_gen_shri_i32(reg, src, 22);
348 tcg_gen_andi_tl(reg, reg, 0x1);
351 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
353 tcg_gen_shri_i32(reg, src, 21);
354 tcg_gen_andi_tl(reg, reg, 0x1);
357 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
359 tcg_gen_shri_i32(reg, src, 20);
360 tcg_gen_andi_tl(reg, reg, 0x1);
363 static inline void gen_op_exception(int exception)
365 TCGv r_except;
367 r_except = tcg_temp_new(TCG_TYPE_I32);
368 tcg_gen_movi_i32(r_except, exception);
369 tcg_gen_helper_0_1(raise_exception, r_except);
372 static inline void gen_cc_clear(void)
374 tcg_gen_movi_i32(cpu_psr, 0);
375 #ifdef TARGET_SPARC64
376 tcg_gen_movi_i32(cpu_xcc, 0);
377 #endif
380 /* old op:
381 if (!T0)
382 env->psr |= PSR_ZERO;
383 if ((int32_t) T0 < 0)
384 env->psr |= PSR_NEG;
386 static inline void gen_cc_NZ(TCGv dst)
388 int l1, l2;
389 TCGv r_zero;
391 l1 = gen_new_label();
392 l2 = gen_new_label();
393 r_zero = tcg_const_tl(0);
394 tcg_gen_brcond_i32(TCG_COND_NE, dst, r_zero, l1);
395 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
396 gen_set_label(l1);
397 tcg_gen_brcond_i32(TCG_COND_GE, dst, r_zero, l2);
398 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
399 gen_set_label(l2);
400 #ifdef TARGET_SPARC64
402 int l3, l4;
404 l3 = gen_new_label();
405 l4 = gen_new_label();
406 tcg_gen_brcond_tl(TCG_COND_NE, dst, r_zero, l3);
407 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
408 gen_set_label(l3);
409 tcg_gen_brcond_tl(TCG_COND_GE, dst, r_zero, l4);
410 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
411 gen_set_label(l4);
413 #endif
416 /* old op:
417 if (T0 < src1)
418 env->psr |= PSR_CARRY;
420 static inline void gen_cc_C_add(TCGv dst, TCGv src1)
422 int l1;
424 l1 = gen_new_label();
425 tcg_gen_brcond_i32(TCG_COND_GEU, dst, src1, l1);
426 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
427 gen_set_label(l1);
428 #ifdef TARGET_SPARC64
430 int l2;
432 l2 = gen_new_label();
433 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l2);
434 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
435 gen_set_label(l2);
437 #endif
440 /* old op:
441 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
442 env->psr |= PSR_OVF;
444 static inline void gen_cc_V_add(TCGv dst, TCGv src1, TCGv src2)
446 TCGv r_temp, r_temp2, r_temp3, r_zero;
447 int l1;
449 l1 = gen_new_label();
451 r_temp = tcg_temp_new(TCG_TYPE_TL);
452 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
453 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
454 r_zero = tcg_const_tl(0);
455 tcg_gen_xor_tl(r_temp, src1, src2);
456 tcg_gen_xori_tl(r_temp, r_temp, -1);
457 tcg_gen_xor_tl(r_temp2, src1, dst);
458 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
459 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
460 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
461 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
462 gen_set_label(l1);
463 #ifdef TARGET_SPARC64
465 int l2;
467 l2 = gen_new_label();
468 tcg_gen_xor_tl(r_temp, src1, src2);
469 tcg_gen_xori_tl(r_temp, r_temp, -1);
470 tcg_gen_xor_tl(r_temp2, src1, dst);
471 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
472 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
473 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
474 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
475 gen_set_label(l2);
477 #endif
480 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
482 TCGv r_temp, r_temp2, r_temp3, r_zero;
483 int l1;
485 l1 = gen_new_label();
487 r_temp = tcg_temp_new(TCG_TYPE_TL);
488 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
489 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
490 r_zero = tcg_const_tl(0);
491 tcg_gen_xor_tl(r_temp, src1, src2);
492 tcg_gen_xori_tl(r_temp, r_temp, -1);
493 tcg_gen_xor_tl(r_temp2, src1, dst);
494 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
495 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
496 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
497 gen_op_exception(TT_TOVF);
498 gen_set_label(l1);
499 #ifdef TARGET_SPARC64
501 int l2;
503 l2 = gen_new_label();
504 tcg_gen_xor_tl(r_temp, src1, src2);
505 tcg_gen_xori_tl(r_temp, r_temp, -1);
506 tcg_gen_xor_tl(r_temp2, src1, dst);
507 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
508 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
509 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
510 gen_op_exception(TT_TOVF);
511 gen_set_label(l2);
513 #endif
516 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
518 int l1;
519 TCGv r_zero, r_temp;
521 l1 = gen_new_label();
522 r_zero = tcg_const_tl(0);
523 r_temp = tcg_temp_new(TCG_TYPE_TL);
524 tcg_gen_or_tl(r_temp, src1, src2);
525 tcg_gen_andi_tl(r_temp, r_temp, 0x3);
526 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, r_zero, l1);
527 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
528 gen_set_label(l1);
531 static inline void gen_tag_tv(TCGv src1, TCGv src2)
533 int l1;
534 TCGv r_zero, r_temp;
536 l1 = gen_new_label();
537 r_zero = tcg_const_tl(0);
538 r_temp = tcg_temp_new(TCG_TYPE_TL);
539 tcg_gen_or_tl(r_temp, src1, src2);
540 tcg_gen_andi_tl(r_temp, r_temp, 0x3);
541 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, r_zero, l1);
542 gen_op_exception(TT_TOVF);
543 gen_set_label(l1);
546 static inline void gen_op_add_T1_T0_cc(void)
548 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
549 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
550 gen_cc_clear();
551 gen_cc_NZ(cpu_T[0]);
552 gen_cc_C_add(cpu_T[0], cpu_cc_src);
553 gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
556 static inline void gen_op_addx_T1_T0_cc(void)
558 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
559 gen_mov_reg_C(cpu_tmp0, cpu_psr);
560 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
561 gen_cc_clear();
562 gen_cc_C_add(cpu_T[0], cpu_cc_src);
563 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
564 gen_cc_C_add(cpu_T[0], cpu_cc_src);
565 gen_cc_NZ(cpu_T[0]);
566 gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
569 static inline void gen_op_tadd_T1_T0_cc(void)
571 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
572 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
573 gen_cc_clear();
574 gen_cc_NZ(cpu_T[0]);
575 gen_cc_C_add(cpu_T[0], cpu_cc_src);
576 gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
577 gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
580 static inline void gen_op_tadd_T1_T0_ccTV(void)
582 gen_tag_tv(cpu_T[0], cpu_T[1]);
583 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
584 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
585 gen_add_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
586 gen_cc_clear();
587 gen_cc_NZ(cpu_T[0]);
588 gen_cc_C_add(cpu_T[0], cpu_cc_src);
591 /* old op:
592 if (src1 < T1)
593 env->psr |= PSR_CARRY;
595 static inline void gen_cc_C_sub(TCGv src1, TCGv src2)
597 int l1;
599 l1 = gen_new_label();
600 tcg_gen_brcond_i32(TCG_COND_GEU, src1, src2, l1);
601 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
602 gen_set_label(l1);
603 #ifdef TARGET_SPARC64
605 int l2;
607 l2 = gen_new_label();
608 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l2);
609 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
610 gen_set_label(l2);
612 #endif
615 /* old op:
616 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
617 env->psr |= PSR_OVF;
619 static inline void gen_cc_V_sub(TCGv dst, TCGv src1, TCGv src2)
621 TCGv r_temp, r_temp2, r_temp3, r_zero;
622 int l1;
624 l1 = gen_new_label();
626 r_temp = tcg_temp_new(TCG_TYPE_TL);
627 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
628 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
629 r_zero = tcg_const_tl(0);
630 tcg_gen_xor_tl(r_temp, src1, src2);
631 tcg_gen_xor_tl(r_temp2, src1, dst);
632 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
633 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
634 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
635 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
636 gen_set_label(l1);
637 #ifdef TARGET_SPARC64
639 int l2;
641 l2 = gen_new_label();
642 tcg_gen_xor_tl(r_temp, src1, src2);
643 tcg_gen_xor_tl(r_temp2, src1, dst);
644 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
645 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
646 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
647 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
648 gen_set_label(l2);
650 #endif
653 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
655 TCGv r_temp, r_temp2, r_temp3, r_zero;
656 int l1;
658 l1 = gen_new_label();
660 r_temp = tcg_temp_new(TCG_TYPE_TL);
661 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
662 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
663 r_zero = tcg_const_tl(0);
664 tcg_gen_xor_tl(r_temp, src1, src2);
665 tcg_gen_xor_tl(r_temp2, src1, dst);
666 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
667 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
668 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
669 gen_op_exception(TT_TOVF);
670 gen_set_label(l1);
671 #ifdef TARGET_SPARC64
673 int l2;
675 l2 = gen_new_label();
676 tcg_gen_xor_tl(r_temp, src1, src2);
677 tcg_gen_xor_tl(r_temp2, src1, dst);
678 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
679 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
680 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
681 gen_op_exception(TT_TOVF);
682 gen_set_label(l2);
684 #endif
687 static inline void gen_op_sub_T1_T0_cc(void)
689 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
690 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
691 gen_cc_clear();
692 gen_cc_NZ(cpu_T[0]);
693 gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
694 gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
697 static inline void gen_op_subx_T1_T0_cc(void)
699 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
700 gen_mov_reg_C(cpu_tmp0, cpu_psr);
701 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
702 gen_cc_clear();
703 gen_cc_C_sub(cpu_T[0], cpu_cc_src);
704 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
705 gen_cc_C_sub(cpu_T[0], cpu_cc_src);
706 gen_cc_NZ(cpu_T[0]);
707 gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
710 static inline void gen_op_tsub_T1_T0_cc(void)
712 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
713 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
714 gen_cc_clear();
715 gen_cc_NZ(cpu_T[0]);
716 gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
717 gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
718 gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
721 static inline void gen_op_tsub_T1_T0_ccTV(void)
723 gen_tag_tv(cpu_T[0], cpu_T[1]);
724 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
725 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
726 gen_sub_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
727 gen_cc_clear();
728 gen_cc_NZ(cpu_T[0]);
729 gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
#ifdef TARGET_SPARC64
/* Trap with TT_DIV_ZERO if the divisor is zero. */
static inline void gen_trap_ifdivzero_i64(TCGv divisor)
{
    int l1 = gen_new_label();

    tcg_gen_brcond_i64(TCG_COND_NE, divisor, tcg_const_tl(0), l1);
    gen_op_exception(TT_DIV_ZERO);
    gen_set_label(l1);
}

/* T0 = T0 / T1, signed 64-bit.  Traps on divide-by-zero and pins the
   INT64_MIN / -1 case to INT64_MIN so the host division cannot fault. */
static inline void gen_op_sdivx_T1_T0(void)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    gen_trap_ifdivzero_i64(cpu_T[1]);
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_T[0], tcg_const_i64(INT64_MIN), l1);
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_T[1], tcg_const_i64(-1), l1);
    tcg_gen_movi_i64(cpu_T[0], INT64_MIN);
    gen_op_jmp_label(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_set_label(l2);
}
#endif
760 static inline void gen_op_div_cc(void)
762 int l1;
763 TCGv r_zero;
765 gen_cc_clear();
766 gen_cc_NZ(cpu_T[0]);
767 l1 = gen_new_label();
768 r_zero = tcg_const_tl(0);
769 tcg_gen_brcond_i32(TCG_COND_EQ, cpu_T[1], r_zero, l1);
770 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
771 gen_set_label(l1);
774 static inline void gen_op_logic_T0_cc(void)
776 gen_cc_clear();
777 gen_cc_NZ(cpu_T[0]);
780 // 1
781 static inline void gen_op_eval_ba(TCGv dst)
783 tcg_gen_movi_tl(dst, 1);
786 // Z
787 static inline void gen_op_eval_be(TCGv dst, TCGv src)
789 gen_mov_reg_Z(dst, src);
792 // Z | (N ^ V)
793 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
795 TCGv r_flag;
797 r_flag = tcg_temp_new(TCG_TYPE_TL);
798 gen_mov_reg_N(r_flag, src);
799 gen_mov_reg_V(dst, src);
800 tcg_gen_xor_tl(dst, dst, r_flag);
801 gen_mov_reg_Z(r_flag, src);
802 tcg_gen_or_tl(dst, dst, r_flag);
805 // N ^ V
806 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
808 TCGv r_V;
810 r_V = tcg_temp_new(TCG_TYPE_TL);
811 gen_mov_reg_V(r_V, src);
812 gen_mov_reg_N(dst, src);
813 tcg_gen_xor_tl(dst, dst, r_V);
816 // C | Z
817 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
819 TCGv r_Z;
821 r_Z = tcg_temp_new(TCG_TYPE_TL);
822 gen_mov_reg_Z(r_Z, src);
823 gen_mov_reg_C(dst, src);
824 tcg_gen_or_tl(dst, dst, r_Z);
827 // C
828 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
830 gen_mov_reg_C(dst, src);
833 // V
834 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
836 gen_mov_reg_V(dst, src);
839 // 0
840 static inline void gen_op_eval_bn(TCGv dst)
842 tcg_gen_movi_tl(dst, 0);
845 // N
846 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
848 gen_mov_reg_N(dst, src);
851 // !Z
852 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
854 gen_mov_reg_Z(dst, src);
855 tcg_gen_xori_tl(dst, dst, 0x1);
858 // !(Z | (N ^ V))
859 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
861 TCGv r_flag;
863 r_flag = tcg_temp_new(TCG_TYPE_TL);
864 gen_mov_reg_N(r_flag, src);
865 gen_mov_reg_V(dst, src);
866 tcg_gen_xor_tl(dst, dst, r_flag);
867 gen_mov_reg_Z(r_flag, src);
868 tcg_gen_or_tl(dst, dst, r_flag);
869 tcg_gen_xori_tl(dst, dst, 0x1);
872 // !(N ^ V)
873 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
875 TCGv r_V;
877 r_V = tcg_temp_new(TCG_TYPE_TL);
878 gen_mov_reg_V(r_V, src);
879 gen_mov_reg_N(dst, src);
880 tcg_gen_xor_tl(dst, dst, r_V);
881 tcg_gen_xori_tl(dst, dst, 0x1);
884 // !(C | Z)
885 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
887 TCGv r_Z;
889 r_Z = tcg_temp_new(TCG_TYPE_TL);
890 gen_mov_reg_Z(r_Z, src);
891 gen_mov_reg_C(dst, src);
892 tcg_gen_or_tl(dst, dst, r_Z);
893 tcg_gen_xori_tl(dst, dst, 0x1);
896 // !C
897 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
899 gen_mov_reg_C(dst, src);
900 tcg_gen_xori_tl(dst, dst, 0x1);
903 // !N
904 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
906 gen_mov_reg_N(dst, src);
907 tcg_gen_xori_tl(dst, dst, 0x1);
910 // !V
911 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
913 gen_mov_reg_V(dst, src);
914 tcg_gen_xori_tl(dst, dst, 0x1);
918 FPSR bit field FCC1 | FCC0:
922 3 unordered
924 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
925 unsigned int fcc_offset)
927 tcg_gen_shri_i32(reg, src, 10 + fcc_offset);
928 tcg_gen_andi_tl(reg, reg, 0x1);
931 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
932 unsigned int fcc_offset)
934 tcg_gen_shri_i32(reg, src, 11 + fcc_offset);
935 tcg_gen_andi_tl(reg, reg, 0x1);
938 // !0: FCC0 | FCC1
939 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
940 unsigned int fcc_offset)
942 TCGv r_fcc1;
944 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
945 gen_mov_reg_FCC0(dst, src, fcc_offset);
946 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
947 tcg_gen_or_tl(dst, dst, r_fcc1);
950 // 1 or 2: FCC0 ^ FCC1
951 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
952 unsigned int fcc_offset)
954 TCGv r_fcc1;
956 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
957 gen_mov_reg_FCC0(dst, src, fcc_offset);
958 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
959 tcg_gen_xor_tl(dst, dst, r_fcc1);
962 // 1 or 3: FCC0
963 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
964 unsigned int fcc_offset)
966 gen_mov_reg_FCC0(dst, src, fcc_offset);
969 // 1: FCC0 & !FCC1
970 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
971 unsigned int fcc_offset)
973 TCGv r_fcc1;
975 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
976 gen_mov_reg_FCC0(dst, src, fcc_offset);
977 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
978 tcg_gen_xori_tl(r_fcc1, r_fcc1, 0x1);
979 tcg_gen_and_tl(dst, dst, r_fcc1);
982 // 2 or 3: FCC1
983 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
984 unsigned int fcc_offset)
986 gen_mov_reg_FCC1(dst, src, fcc_offset);
989 // 2: !FCC0 & FCC1
990 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
991 unsigned int fcc_offset)
993 TCGv r_fcc1;
995 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
996 gen_mov_reg_FCC0(dst, src, fcc_offset);
997 tcg_gen_xori_tl(dst, dst, 0x1);
998 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
999 tcg_gen_and_tl(dst, dst, r_fcc1);
1002 // 3: FCC0 & FCC1
1003 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1004 unsigned int fcc_offset)
1006 TCGv r_fcc1;
1008 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
1009 gen_mov_reg_FCC0(dst, src, fcc_offset);
1010 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
1011 tcg_gen_and_tl(dst, dst, r_fcc1);
1014 // 0: !(FCC0 | FCC1)
1015 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1016 unsigned int fcc_offset)
1018 TCGv r_fcc1;
1020 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
1021 gen_mov_reg_FCC0(dst, src, fcc_offset);
1022 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
1023 tcg_gen_or_tl(dst, dst, r_fcc1);
1024 tcg_gen_xori_tl(dst, dst, 0x1);
1027 // 0 or 3: !(FCC0 ^ FCC1)
1028 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1029 unsigned int fcc_offset)
1031 TCGv r_fcc1;
1033 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
1034 gen_mov_reg_FCC0(dst, src, fcc_offset);
1035 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
1036 tcg_gen_xor_tl(dst, dst, r_fcc1);
1037 tcg_gen_xori_tl(dst, dst, 0x1);
1040 // 0 or 2: !FCC0
1041 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1042 unsigned int fcc_offset)
1044 gen_mov_reg_FCC0(dst, src, fcc_offset);
1045 tcg_gen_xori_tl(dst, dst, 0x1);
1048 // !1: !(FCC0 & !FCC1)
1049 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1050 unsigned int fcc_offset)
1052 TCGv r_fcc1;
1054 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
1055 gen_mov_reg_FCC0(dst, src, fcc_offset);
1056 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
1057 tcg_gen_xori_tl(r_fcc1, r_fcc1, 0x1);
1058 tcg_gen_and_tl(dst, dst, r_fcc1);
1059 tcg_gen_xori_tl(dst, dst, 0x1);
1062 // 0 or 1: !FCC1
1063 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1064 unsigned int fcc_offset)
1066 gen_mov_reg_FCC1(dst, src, fcc_offset);
1067 tcg_gen_xori_tl(dst, dst, 0x1);
1070 // !2: !(!FCC0 & FCC1)
1071 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1072 unsigned int fcc_offset)
1074 TCGv r_fcc1;
1076 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
1077 gen_mov_reg_FCC0(dst, src, fcc_offset);
1078 tcg_gen_xori_tl(dst, dst, 0x1);
1079 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
1080 tcg_gen_and_tl(dst, dst, r_fcc1);
1081 tcg_gen_xori_tl(dst, dst, 0x1);
1084 // !3: !(FCC0 & FCC1)
1085 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1086 unsigned int fcc_offset)
1088 TCGv r_fcc1;
1090 r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
1091 gen_mov_reg_FCC0(dst, src, fcc_offset);
1092 gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
1093 tcg_gen_and_tl(dst, dst, r_fcc1);
1094 tcg_gen_xori_tl(dst, dst, 0x1);
1097 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1098 target_ulong pc2, TCGv r_cond)
1100 TCGv r_zero;
1101 int l1;
1103 l1 = gen_new_label();
1104 r_zero = tcg_const_tl(0);
1106 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);
1108 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1110 gen_set_label(l1);
1111 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1114 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1115 target_ulong pc2, TCGv r_cond)
1117 TCGv r_zero;
1118 int l1;
1120 l1 = gen_new_label();
1121 r_zero = tcg_const_tl(0);
1123 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);
1125 gen_goto_tb(dc, 0, pc2, pc1);
1127 gen_set_label(l1);
1128 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1131 static inline void gen_branch(DisasContext *dc, target_ulong pc,
1132 target_ulong npc)
1134 gen_goto_tb(dc, 0, pc, npc);
1137 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1138 TCGv r_cond)
1140 TCGv r_zero;
1141 int l1, l2;
1143 l1 = gen_new_label();
1144 l2 = gen_new_label();
1145 r_zero = tcg_const_tl(0);
1147 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);
1149 gen_movl_npc_im(npc1);
1150 gen_op_jmp_label(l2);
1152 gen_set_label(l1);
1153 gen_movl_npc_im(npc2);
1154 gen_set_label(l2);
1157 /* call this function before using T2 as it may have been set for a jump */
1158 static inline void flush_T2(DisasContext * dc)
1160 if (dc->npc == JUMP_PC) {
1161 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1162 dc->npc = DYNAMIC_PC;
1166 static inline void save_npc(DisasContext * dc)
1168 if (dc->npc == JUMP_PC) {
1169 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1170 dc->npc = DYNAMIC_PC;
1171 } else if (dc->npc != DYNAMIC_PC) {
1172 gen_movl_npc_im(dc->npc);
1176 static inline void save_state(DisasContext * dc)
1178 gen_jmp_im(dc->pc);
1179 save_npc(dc);
1182 static inline void gen_mov_pc_npc(DisasContext * dc)
1184 if (dc->npc == JUMP_PC) {
1185 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1186 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
1187 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
1188 dc->pc = DYNAMIC_PC;
1189 } else if (dc->npc == DYNAMIC_PC) {
1190 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
1191 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
1192 dc->pc = DYNAMIC_PC;
1193 } else {
1194 dc->pc = dc->npc;
1198 static inline void gen_op_next_insn(void)
1200 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
1201 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
1202 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, 4);
1203 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
1206 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1208 TCGv r_src;
1210 #ifdef TARGET_SPARC64
1211 if (cc)
1212 r_src = cpu_xcc;
1213 else
1214 r_src = cpu_psr;
1215 #else
1216 r_src = cpu_psr;
1217 #endif
1218 switch (cond) {
1219 case 0x0:
1220 gen_op_eval_bn(r_dst);
1221 break;
1222 case 0x1:
1223 gen_op_eval_be(r_dst, r_src);
1224 break;
1225 case 0x2:
1226 gen_op_eval_ble(r_dst, r_src);
1227 break;
1228 case 0x3:
1229 gen_op_eval_bl(r_dst, r_src);
1230 break;
1231 case 0x4:
1232 gen_op_eval_bleu(r_dst, r_src);
1233 break;
1234 case 0x5:
1235 gen_op_eval_bcs(r_dst, r_src);
1236 break;
1237 case 0x6:
1238 gen_op_eval_bneg(r_dst, r_src);
1239 break;
1240 case 0x7:
1241 gen_op_eval_bvs(r_dst, r_src);
1242 break;
1243 case 0x8:
1244 gen_op_eval_ba(r_dst);
1245 break;
1246 case 0x9:
1247 gen_op_eval_bne(r_dst, r_src);
1248 break;
1249 case 0xa:
1250 gen_op_eval_bg(r_dst, r_src);
1251 break;
1252 case 0xb:
1253 gen_op_eval_bge(r_dst, r_src);
1254 break;
1255 case 0xc:
1256 gen_op_eval_bgu(r_dst, r_src);
1257 break;
1258 case 0xd:
1259 gen_op_eval_bcc(r_dst, r_src);
1260 break;
1261 case 0xe:
1262 gen_op_eval_bpos(r_dst, r_src);
1263 break;
1264 case 0xf:
1265 gen_op_eval_bvc(r_dst, r_src);
1266 break;
1270 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1272 TCGv r_src;
1273 unsigned int offset;
1275 r_src = tcg_temp_new(TCG_TYPE_TL);
1276 tcg_gen_ld_tl(r_src, cpu_env, offsetof(CPUSPARCState, fsr));
1278 switch (cc) {
1279 default:
1280 case 0x0:
1281 offset = 0;
1282 break;
1283 case 0x1:
1284 offset = 32 - 10;
1285 break;
1286 case 0x2:
1287 offset = 34 - 10;
1288 break;
1289 case 0x3:
1290 offset = 36 - 10;
1291 break;
1294 switch (cond) {
1295 case 0x0:
1296 gen_op_eval_bn(r_dst);
1297 break;
1298 case 0x1:
1299 gen_op_eval_fbne(r_dst, r_src, offset);
1300 break;
1301 case 0x2:
1302 gen_op_eval_fblg(r_dst, r_src, offset);
1303 break;
1304 case 0x3:
1305 gen_op_eval_fbul(r_dst, r_src, offset);
1306 break;
1307 case 0x4:
1308 gen_op_eval_fbl(r_dst, r_src, offset);
1309 break;
1310 case 0x5:
1311 gen_op_eval_fbug(r_dst, r_src, offset);
1312 break;
1313 case 0x6:
1314 gen_op_eval_fbg(r_dst, r_src, offset);
1315 break;
1316 case 0x7:
1317 gen_op_eval_fbu(r_dst, r_src, offset);
1318 break;
1319 case 0x8:
1320 gen_op_eval_ba(r_dst);
1321 break;
1322 case 0x9:
1323 gen_op_eval_fbe(r_dst, r_src, offset);
1324 break;
1325 case 0xa:
1326 gen_op_eval_fbue(r_dst, r_src, offset);
1327 break;
1328 case 0xb:
1329 gen_op_eval_fbge(r_dst, r_src, offset);
1330 break;
1331 case 0xc:
1332 gen_op_eval_fbuge(r_dst, r_src, offset);
1333 break;
1334 case 0xd:
1335 gen_op_eval_fble(r_dst, r_src, offset);
1336 break;
1337 case 0xe:
1338 gen_op_eval_fbule(r_dst, r_src, offset);
1339 break;
1340 case 0xf:
1341 gen_op_eval_fbo(r_dst, r_src, offset);
1342 break;
#ifdef TARGET_SPARC64
// Inverted logic
/* Map the 3-bit BPr rcond field to the TCG condition that makes the
   branch FALL THROUGH (hence "inverted").  Slots 0 and 4 are reserved
   encodings and never indexed. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* r_dst = (T0 <rcond> 0) ? 1 : 0, emitted with a branch over the
   movi using the inverted condition from the table above. */
static inline void gen_cond_reg(TCGv r_dst, int cond)
{
    TCGv r_zero;
    int l1;

    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);
    tcg_gen_mov_tl(r_dst, r_zero);
    tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1373 /* XXX: potentially incorrect if dynamic npc */
1374 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
1376 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1377 target_ulong target = dc->pc + offset;
1379 if (cond == 0x0) {
1380 /* unconditional not taken */
1381 if (a) {
1382 dc->pc = dc->npc + 4;
1383 dc->npc = dc->pc + 4;
1384 } else {
1385 dc->pc = dc->npc;
1386 dc->npc = dc->pc + 4;
1388 } else if (cond == 0x8) {
1389 /* unconditional taken */
1390 if (a) {
1391 dc->pc = target;
1392 dc->npc = dc->pc + 4;
1393 } else {
1394 dc->pc = dc->npc;
1395 dc->npc = target;
1397 } else {
1398 flush_T2(dc);
1399 gen_cond(cpu_T[2], cc, cond);
1400 if (a) {
1401 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1402 dc->is_br = 1;
1403 } else {
1404 dc->pc = dc->npc;
1405 dc->jump_pc[0] = target;
1406 dc->jump_pc[1] = dc->npc + 4;
1407 dc->npc = JUMP_PC;
1412 /* XXX: potentially incorrect if dynamic npc */
1413 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
1415 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1416 target_ulong target = dc->pc + offset;
1418 if (cond == 0x0) {
1419 /* unconditional not taken */
1420 if (a) {
1421 dc->pc = dc->npc + 4;
1422 dc->npc = dc->pc + 4;
1423 } else {
1424 dc->pc = dc->npc;
1425 dc->npc = dc->pc + 4;
1427 } else if (cond == 0x8) {
1428 /* unconditional taken */
1429 if (a) {
1430 dc->pc = target;
1431 dc->npc = dc->pc + 4;
1432 } else {
1433 dc->pc = dc->npc;
1434 dc->npc = target;
1436 } else {
1437 flush_T2(dc);
1438 gen_fcond(cpu_T[2], cc, cond);
1439 if (a) {
1440 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1441 dc->is_br = 1;
1442 } else {
1443 dc->pc = dc->npc;
1444 dc->jump_pc[0] = target;
1445 dc->jump_pc[1] = dc->npc + 4;
1446 dc->npc = JUMP_PC;
1451 #ifdef TARGET_SPARC64
1452 /* XXX: potentially incorrect if dynamic npc */
1453 static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
1455 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1456 target_ulong target = dc->pc + offset;
1458 flush_T2(dc);
1459 gen_cond_reg(cpu_T[2], cond);
1460 if (a) {
1461 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1462 dc->is_br = 1;
1463 } else {
1464 dc->pc = dc->npc;
1465 dc->jump_pc[0] = target;
1466 dc->jump_pc[1] = dc->npc + 4;
1467 dc->npc = JUMP_PC;
1471 static GenOpFunc * const gen_fcmps[4] = {
1472 helper_fcmps,
1473 helper_fcmps_fcc1,
1474 helper_fcmps_fcc2,
1475 helper_fcmps_fcc3,
1478 static GenOpFunc * const gen_fcmpd[4] = {
1479 helper_fcmpd,
1480 helper_fcmpd_fcc1,
1481 helper_fcmpd_fcc2,
1482 helper_fcmpd_fcc3,
1485 #if defined(CONFIG_USER_ONLY)
1486 static GenOpFunc * const gen_fcmpq[4] = {
1487 helper_fcmpq,
1488 helper_fcmpq_fcc1,
1489 helper_fcmpq_fcc2,
1490 helper_fcmpq_fcc3,
1492 #endif
1494 static GenOpFunc * const gen_fcmpes[4] = {
1495 helper_fcmpes,
1496 helper_fcmpes_fcc1,
1497 helper_fcmpes_fcc2,
1498 helper_fcmpes_fcc3,
1501 static GenOpFunc * const gen_fcmped[4] = {
1502 helper_fcmped,
1503 helper_fcmped_fcc1,
1504 helper_fcmped_fcc2,
1505 helper_fcmped_fcc3,
1508 #if defined(CONFIG_USER_ONLY)
1509 static GenOpFunc * const gen_fcmpeq[4] = {
1510 helper_fcmpeq,
1511 helper_fcmpeq_fcc1,
1512 helper_fcmpeq_fcc2,
1513 helper_fcmpeq_fcc3,
1515 #endif
1517 static inline void gen_op_fcmps(int fccno)
1519 tcg_gen_helper_0_0(gen_fcmps[fccno]);
1522 static inline void gen_op_fcmpd(int fccno)
1524 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
1527 #if defined(CONFIG_USER_ONLY)
1528 static inline void gen_op_fcmpq(int fccno)
1530 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
1532 #endif
1534 static inline void gen_op_fcmpes(int fccno)
1536 tcg_gen_helper_0_0(gen_fcmpes[fccno]);
1539 static inline void gen_op_fcmped(int fccno)
1541 tcg_gen_helper_0_0(gen_fcmped[fccno]);
1544 #if defined(CONFIG_USER_ONLY)
1545 static inline void gen_op_fcmpeq(int fccno)
1547 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
1549 #endif
1551 #else
1553 static inline void gen_op_fcmps(int fccno)
1555 tcg_gen_helper_0_0(helper_fcmps);
1558 static inline void gen_op_fcmpd(int fccno)
1560 tcg_gen_helper_0_0(helper_fcmpd);
1563 #if defined(CONFIG_USER_ONLY)
1564 static inline void gen_op_fcmpq(int fccno)
1566 tcg_gen_helper_0_0(helper_fcmpq);
1568 #endif
1570 static inline void gen_op_fcmpes(int fccno)
1572 tcg_gen_helper_0_0(helper_fcmpes);
1575 static inline void gen_op_fcmped(int fccno)
1577 tcg_gen_helper_0_0(helper_fcmped);
1580 #if defined(CONFIG_USER_ONLY)
1581 static inline void gen_op_fcmpeq(int fccno)
1583 tcg_gen_helper_0_0(helper_fcmpeq);
1585 #endif
1587 #endif
1589 static inline void gen_op_fpexception_im(int fsr_flags)
1591 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1592 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~FSR_FTT_MASK);
1593 tcg_gen_ori_tl(cpu_tmp0, cpu_tmp0, fsr_flags);
1594 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1595 gen_op_exception(TT_FP_EXCP);
1598 static int gen_trap_ifnofpu(DisasContext * dc)
1600 #if !defined(CONFIG_USER_ONLY)
1601 if (!dc->fpu_enabled) {
1602 save_state(dc);
1603 gen_op_exception(TT_NFPU_INSN);
1604 dc->is_br = 1;
1605 return 1;
1607 #endif
1608 return 0;
1611 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1613 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1614 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
1615 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1618 static inline void gen_clear_float_exceptions(void)
1620 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1623 /* asi moves */
1624 #ifdef TARGET_SPARC64
1625 static inline void gen_ld_asi(int insn, int size, int sign)
1627 int asi, offset;
1628 TCGv r_size, r_sign;
1630 r_size = tcg_temp_new(TCG_TYPE_I32);
1631 r_sign = tcg_temp_new(TCG_TYPE_I32);
1632 tcg_gen_movi_i32(r_size, size);
1633 tcg_gen_movi_i32(r_sign, sign);
1634 if (IS_IMM) {
1635 offset = GET_FIELD(insn, 25, 31);
1636 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1637 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1638 } else {
1639 asi = GET_FIELD(insn, 19, 26);
1640 tcg_gen_movi_i32(cpu_T[1], asi);
1642 tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
1643 r_sign);
1646 static inline void gen_st_asi(int insn, int size)
1648 int asi, offset;
1649 TCGv r_asi, r_size;
1651 r_asi = tcg_temp_new(TCG_TYPE_I32);
1652 r_size = tcg_temp_new(TCG_TYPE_I32);
1653 tcg_gen_movi_i32(r_size, size);
1654 if (IS_IMM) {
1655 offset = GET_FIELD(insn, 25, 31);
1656 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1657 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1658 } else {
1659 asi = GET_FIELD(insn, 19, 26);
1660 tcg_gen_movi_i32(r_asi, asi);
1662 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
1665 static inline void gen_ldf_asi(int insn, int size, int rd)
1667 int asi, offset;
1668 TCGv r_asi, r_size, r_rd;
1670 r_asi = tcg_temp_new(TCG_TYPE_I32);
1671 r_size = tcg_temp_new(TCG_TYPE_I32);
1672 r_rd = tcg_temp_new(TCG_TYPE_I32);
1673 tcg_gen_movi_i32(r_size, size);
1674 tcg_gen_movi_i32(r_rd, rd);
1675 if (IS_IMM) {
1676 offset = GET_FIELD(insn, 25, 31);
1677 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1678 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1679 } else {
1680 asi = GET_FIELD(insn, 19, 26);
1681 tcg_gen_movi_i32(r_asi, asi);
1683 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
1686 static inline void gen_stf_asi(int insn, int size, int rd)
1688 int asi, offset;
1689 TCGv r_asi, r_size, r_rd;
1691 r_asi = tcg_temp_new(TCG_TYPE_I32);
1692 r_size = tcg_temp_new(TCG_TYPE_I32);
1693 r_rd = tcg_temp_new(TCG_TYPE_I32);
1694 tcg_gen_movi_i32(r_size, size);
1695 tcg_gen_movi_i32(r_rd, rd);
1696 if (IS_IMM) {
1697 offset = GET_FIELD(insn, 25, 31);
1698 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1699 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1700 } else {
1701 asi = GET_FIELD(insn, 19, 26);
1702 tcg_gen_movi_i32(r_asi, asi);
1704 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
1707 static inline void gen_swap_asi(int insn)
1709 int asi, offset;
1710 TCGv r_size, r_sign, r_temp;
1712 r_size = tcg_temp_new(TCG_TYPE_I32);
1713 r_sign = tcg_temp_new(TCG_TYPE_I32);
1714 r_temp = tcg_temp_new(TCG_TYPE_I32);
1715 tcg_gen_movi_i32(r_size, 4);
1716 tcg_gen_movi_i32(r_sign, 0);
1717 if (IS_IMM) {
1718 offset = GET_FIELD(insn, 25, 31);
1719 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1720 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1721 } else {
1722 asi = GET_FIELD(insn, 19, 26);
1723 tcg_gen_movi_i32(cpu_T[1], asi);
1725 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1726 r_sign);
1727 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1728 tcg_gen_mov_i32(cpu_T[1], r_temp);
1731 static inline void gen_ldda_asi(int insn)
1733 int asi, offset;
1734 TCGv r_size, r_sign, r_dword;
1736 r_size = tcg_temp_new(TCG_TYPE_I32);
1737 r_sign = tcg_temp_new(TCG_TYPE_I32);
1738 r_dword = tcg_temp_new(TCG_TYPE_I64);
1739 tcg_gen_movi_i32(r_size, 8);
1740 tcg_gen_movi_i32(r_sign, 0);
1741 if (IS_IMM) {
1742 offset = GET_FIELD(insn, 25, 31);
1743 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1744 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1745 } else {
1746 asi = GET_FIELD(insn, 19, 26);
1747 tcg_gen_movi_i32(cpu_T[1], asi);
1749 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1750 r_sign);
1751 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
1752 tcg_gen_shri_i64(r_dword, r_dword, 32);
1753 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
1756 static inline void gen_cas_asi(int insn, int rd)
1758 int asi, offset;
1759 TCGv r_val1, r_asi;
1761 r_val1 = tcg_temp_new(TCG_TYPE_I32);
1762 r_asi = tcg_temp_new(TCG_TYPE_I32);
1763 gen_movl_reg_TN(rd, r_val1);
1764 if (IS_IMM) {
1765 offset = GET_FIELD(insn, 25, 31);
1766 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1767 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1768 } else {
1769 asi = GET_FIELD(insn, 19, 26);
1770 tcg_gen_movi_i32(r_asi, asi);
1772 tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1773 r_asi);
1776 static inline void gen_casx_asi(int insn, int rd)
1778 int asi, offset;
1779 TCGv r_val1, r_asi;
1781 r_val1 = tcg_temp_new(TCG_TYPE_I64);
1782 r_asi = tcg_temp_new(TCG_TYPE_I32);
1783 gen_movl_reg_TN(rd, r_val1);
1784 if (IS_IMM) {
1785 offset = GET_FIELD(insn, 25, 31);
1786 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1787 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1788 } else {
1789 asi = GET_FIELD(insn, 19, 26);
1790 tcg_gen_movi_i32(r_asi, asi);
1792 tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1793 r_asi);
1796 #elif !defined(CONFIG_USER_ONLY)
1798 static inline void gen_ld_asi(int insn, int size, int sign)
1800 int asi;
1801 TCGv r_size, r_sign, r_dword;
1803 r_size = tcg_temp_new(TCG_TYPE_I32);
1804 r_sign = tcg_temp_new(TCG_TYPE_I32);
1805 r_dword = tcg_temp_new(TCG_TYPE_I64);
1806 tcg_gen_movi_i32(r_size, size);
1807 tcg_gen_movi_i32(r_sign, sign);
1808 asi = GET_FIELD(insn, 19, 26);
1809 tcg_gen_movi_i32(cpu_T[1], asi);
1810 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1811 r_sign);
1812 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
1815 static inline void gen_st_asi(int insn, int size)
1817 int asi;
1818 TCGv r_dword, r_asi, r_size;
1820 r_dword = tcg_temp_new(TCG_TYPE_I64);
1821 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
1822 r_asi = tcg_temp_new(TCG_TYPE_I32);
1823 r_size = tcg_temp_new(TCG_TYPE_I32);
1824 asi = GET_FIELD(insn, 19, 26);
1825 tcg_gen_movi_i32(r_asi, asi);
1826 tcg_gen_movi_i32(r_size, size);
1827 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
1830 static inline void gen_swap_asi(int insn)
1832 int asi;
1833 TCGv r_size, r_sign, r_temp;
1835 r_size = tcg_temp_new(TCG_TYPE_I32);
1836 r_sign = tcg_temp_new(TCG_TYPE_I32);
1837 r_temp = tcg_temp_new(TCG_TYPE_I32);
1838 tcg_gen_movi_i32(r_size, 4);
1839 tcg_gen_movi_i32(r_sign, 0);
1840 asi = GET_FIELD(insn, 19, 26);
1841 tcg_gen_movi_i32(cpu_T[1], asi);
1842 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1843 r_sign);
1844 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1845 tcg_gen_mov_i32(cpu_T[1], r_temp);
1848 static inline void gen_ldda_asi(int insn)
1850 int asi;
1851 TCGv r_size, r_sign, r_dword;
1853 r_size = tcg_temp_new(TCG_TYPE_I32);
1854 r_sign = tcg_temp_new(TCG_TYPE_I32);
1855 r_dword = tcg_temp_new(TCG_TYPE_I64);
1856 tcg_gen_movi_i32(r_size, 8);
1857 tcg_gen_movi_i32(r_sign, 0);
1858 asi = GET_FIELD(insn, 19, 26);
1859 tcg_gen_movi_i32(cpu_T[1], asi);
1860 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1861 r_sign);
1862 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
1863 tcg_gen_shri_i64(r_dword, r_dword, 32);
1864 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
1866 #endif
1868 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1869 static inline void gen_ldstub_asi(int insn)
1871 int asi;
1872 TCGv r_dword, r_asi, r_size;
1874 gen_ld_asi(insn, 1, 0);
1876 r_dword = tcg_temp_new(TCG_TYPE_I64);
1877 r_asi = tcg_temp_new(TCG_TYPE_I32);
1878 r_size = tcg_temp_new(TCG_TYPE_I32);
1879 asi = GET_FIELD(insn, 19, 26);
1880 tcg_gen_movi_i32(r_dword, 0xff);
1881 tcg_gen_movi_i32(r_asi, asi);
1882 tcg_gen_movi_i32(r_size, 1);
1883 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
1885 #endif
1887 /* before an instruction, dc->pc must be static */
1888 static void disas_sparc_insn(DisasContext * dc)
1890 unsigned int insn, opc, rs1, rs2, rd;
1892 insn = ldl_code(dc->pc);
1893 opc = GET_FIELD(insn, 0, 1);
1895 rd = GET_FIELD(insn, 2, 6);
1896 switch (opc) {
1897 case 0: /* branches/sethi */
1899 unsigned int xop = GET_FIELD(insn, 7, 9);
1900 int32_t target;
1901 switch (xop) {
1902 #ifdef TARGET_SPARC64
1903 case 0x1: /* V9 BPcc */
1905 int cc;
1907 target = GET_FIELD_SP(insn, 0, 18);
1908 target = sign_extend(target, 18);
1909 target <<= 2;
1910 cc = GET_FIELD_SP(insn, 20, 21);
1911 if (cc == 0)
1912 do_branch(dc, target, insn, 0);
1913 else if (cc == 2)
1914 do_branch(dc, target, insn, 1);
1915 else
1916 goto illegal_insn;
1917 goto jmp_insn;
1919 case 0x3: /* V9 BPr */
1921 target = GET_FIELD_SP(insn, 0, 13) |
1922 (GET_FIELD_SP(insn, 20, 21) << 14);
1923 target = sign_extend(target, 16);
1924 target <<= 2;
1925 rs1 = GET_FIELD(insn, 13, 17);
1926 gen_movl_reg_T0(rs1);
1927 do_branch_reg(dc, target, insn);
1928 goto jmp_insn;
1930 case 0x5: /* V9 FBPcc */
1932 int cc = GET_FIELD_SP(insn, 20, 21);
1933 if (gen_trap_ifnofpu(dc))
1934 goto jmp_insn;
1935 target = GET_FIELD_SP(insn, 0, 18);
1936 target = sign_extend(target, 19);
1937 target <<= 2;
1938 do_fbranch(dc, target, insn, cc);
1939 goto jmp_insn;
1941 #else
1942 case 0x7: /* CBN+x */
1944 goto ncp_insn;
1946 #endif
1947 case 0x2: /* BN+x */
1949 target = GET_FIELD(insn, 10, 31);
1950 target = sign_extend(target, 22);
1951 target <<= 2;
1952 do_branch(dc, target, insn, 0);
1953 goto jmp_insn;
1955 case 0x6: /* FBN+x */
1957 if (gen_trap_ifnofpu(dc))
1958 goto jmp_insn;
1959 target = GET_FIELD(insn, 10, 31);
1960 target = sign_extend(target, 22);
1961 target <<= 2;
1962 do_fbranch(dc, target, insn, 0);
1963 goto jmp_insn;
1965 case 0x4: /* SETHI */
1966 #define OPTIM
1967 #if defined(OPTIM)
1968 if (rd) { // nop
1969 #endif
1970 uint32_t value = GET_FIELD(insn, 10, 31);
1971 tcg_gen_movi_tl(cpu_T[0], value << 10);
1972 gen_movl_T0_reg(rd);
1973 #if defined(OPTIM)
1975 #endif
1976 break;
1977 case 0x0: /* UNIMPL */
1978 default:
1979 goto illegal_insn;
1981 break;
1983 break;
1984 case 1:
1985 /*CALL*/ {
1986 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1988 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1989 gen_movl_T0_reg(15);
1990 target += dc->pc;
1991 gen_mov_pc_npc(dc);
1992 dc->npc = target;
1994 goto jmp_insn;
1995 case 2: /* FPU & Logical Operations */
1997 unsigned int xop = GET_FIELD(insn, 7, 12);
1998 if (xop == 0x3a) { /* generate trap */
1999 int cond;
2001 rs1 = GET_FIELD(insn, 13, 17);
2002 gen_movl_reg_T0(rs1);
2003 if (IS_IMM) {
2004 rs2 = GET_FIELD(insn, 25, 31);
2005 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
2006 } else {
2007 rs2 = GET_FIELD(insn, 27, 31);
2008 #if defined(OPTIM)
2009 if (rs2 != 0) {
2010 #endif
2011 gen_movl_reg_T1(rs2);
2012 gen_op_add_T1_T0();
2013 #if defined(OPTIM)
2015 #endif
2017 cond = GET_FIELD(insn, 3, 6);
2018 if (cond == 0x8) {
2019 save_state(dc);
2020 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
2021 } else if (cond != 0) {
2022 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2023 #ifdef TARGET_SPARC64
2024 /* V9 icc/xcc */
2025 int cc = GET_FIELD_SP(insn, 11, 12);
2027 save_state(dc);
2028 if (cc == 0)
2029 gen_cond(r_cond, 0, cond);
2030 else if (cc == 2)
2031 gen_cond(r_cond, 1, cond);
2032 else
2033 goto illegal_insn;
2034 #else
2035 save_state(dc);
2036 gen_cond(r_cond, 0, cond);
2037 #endif
2038 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], r_cond);
2040 gen_op_next_insn();
2041 tcg_gen_exit_tb(0);
2042 dc->is_br = 1;
2043 goto jmp_insn;
2044 } else if (xop == 0x28) {
2045 rs1 = GET_FIELD(insn, 13, 17);
2046 switch(rs1) {
2047 case 0: /* rdy */
2048 #ifndef TARGET_SPARC64
2049 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2050 manual, rdy on the microSPARC
2051 II */
2052 case 0x0f: /* stbar in the SPARCv8 manual,
2053 rdy on the microSPARC II */
2054 case 0x10 ... 0x1f: /* implementation-dependent in the
2055 SPARCv8 manual, rdy on the
2056 microSPARC II */
2057 #endif
2058 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
2059 gen_movl_T0_reg(rd);
2060 break;
2061 #ifdef TARGET_SPARC64
2062 case 0x2: /* V9 rdccr */
2063 gen_op_rdccr();
2064 gen_movl_T0_reg(rd);
2065 break;
2066 case 0x3: /* V9 rdasi */
2067 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
2068 gen_movl_T0_reg(rd);
2069 break;
2070 case 0x4: /* V9 rdtick */
2072 TCGv r_tickptr;
2074 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2075 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2076 offsetof(CPUState, tick));
2077 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2078 r_tickptr);
2079 gen_movl_T0_reg(rd);
2081 break;
2082 case 0x5: /* V9 rdpc */
2083 tcg_gen_movi_tl(cpu_T[0], dc->pc);
2084 gen_movl_T0_reg(rd);
2085 break;
2086 case 0x6: /* V9 rdfprs */
2087 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
2088 gen_movl_T0_reg(rd);
2089 break;
2090 case 0xf: /* V9 membar */
2091 break; /* no effect */
2092 case 0x13: /* Graphics Status */
2093 if (gen_trap_ifnofpu(dc))
2094 goto jmp_insn;
2095 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
2096 gen_movl_T0_reg(rd);
2097 break;
2098 case 0x17: /* Tick compare */
2099 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
2100 gen_movl_T0_reg(rd);
2101 break;
2102 case 0x18: /* System tick */
2104 TCGv r_tickptr;
2106 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2107 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2108 offsetof(CPUState, stick));
2109 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2110 r_tickptr);
2111 gen_movl_T0_reg(rd);
2113 break;
2114 case 0x19: /* System tick compare */
2115 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
2116 gen_movl_T0_reg(rd);
2117 break;
2118 case 0x10: /* Performance Control */
2119 case 0x11: /* Performance Instrumentation Counter */
2120 case 0x12: /* Dispatch Control */
2121 case 0x14: /* Softint set, WO */
2122 case 0x15: /* Softint clear, WO */
2123 case 0x16: /* Softint write */
2124 #endif
2125 default:
2126 goto illegal_insn;
2128 #if !defined(CONFIG_USER_ONLY)
2129 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2130 #ifndef TARGET_SPARC64
2131 if (!supervisor(dc))
2132 goto priv_insn;
2133 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
2134 #else
2135 if (!hypervisor(dc))
2136 goto priv_insn;
2137 rs1 = GET_FIELD(insn, 13, 17);
2138 switch (rs1) {
2139 case 0: // hpstate
2140 // gen_op_rdhpstate();
2141 break;
2142 case 1: // htstate
2143 // gen_op_rdhtstate();
2144 break;
2145 case 3: // hintp
2146 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
2147 break;
2148 case 5: // htba
2149 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
2150 break;
2151 case 6: // hver
2152 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
2153 break;
2154 case 31: // hstick_cmpr
2155 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
2156 break;
2157 default:
2158 goto illegal_insn;
2160 #endif
2161 gen_movl_T0_reg(rd);
2162 break;
2163 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2164 if (!supervisor(dc))
2165 goto priv_insn;
2166 #ifdef TARGET_SPARC64
2167 rs1 = GET_FIELD(insn, 13, 17);
2168 switch (rs1) {
2169 case 0: // tpc
2171 TCGv r_tsptr;
2173 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2174 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2175 offsetof(CPUState, tsptr));
2176 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2177 offsetof(trap_state, tpc));
2179 break;
2180 case 1: // tnpc
2182 TCGv r_tsptr;
2184 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2185 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2186 offsetof(CPUState, tsptr));
2187 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2188 offsetof(trap_state, tnpc));
2190 break;
2191 case 2: // tstate
2193 TCGv r_tsptr;
2195 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2196 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2197 offsetof(CPUState, tsptr));
2198 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2199 offsetof(trap_state, tstate));
2201 break;
2202 case 3: // tt
2204 TCGv r_tsptr;
2206 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2207 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2208 offsetof(CPUState, tsptr));
2209 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
2210 offsetof(trap_state, tt));
2212 break;
2213 case 4: // tick
2215 TCGv r_tickptr;
2217 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2218 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2219 offsetof(CPUState, tick));
2220 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2221 r_tickptr);
2222 gen_movl_T0_reg(rd);
2224 break;
2225 case 5: // tba
2226 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2227 break;
2228 case 6: // pstate
2229 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
2230 break;
2231 case 7: // tl
2232 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
2233 break;
2234 case 8: // pil
2235 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
2236 break;
2237 case 9: // cwp
2238 gen_op_rdcwp();
2239 break;
2240 case 10: // cansave
2241 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
2242 break;
2243 case 11: // canrestore
2244 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
2245 break;
2246 case 12: // cleanwin
2247 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
2248 break;
2249 case 13: // otherwin
2250 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
2251 break;
2252 case 14: // wstate
2253 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
2254 break;
2255 case 16: // UA2005 gl
2256 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
2257 break;
2258 case 26: // UA2005 strand status
2259 if (!hypervisor(dc))
2260 goto priv_insn;
2261 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
2262 break;
2263 case 31: // ver
2264 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
2265 break;
2266 case 15: // fq
2267 default:
2268 goto illegal_insn;
2270 #else
2271 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
2272 #endif
2273 gen_movl_T0_reg(rd);
2274 break;
2275 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2276 #ifdef TARGET_SPARC64
2277 gen_op_flushw();
2278 #else
2279 if (!supervisor(dc))
2280 goto priv_insn;
2281 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2282 gen_movl_T0_reg(rd);
2283 #endif
2284 break;
2285 #endif
2286 } else if (xop == 0x34) { /* FPU Operations */
2287 if (gen_trap_ifnofpu(dc))
2288 goto jmp_insn;
2289 gen_op_clear_ieee_excp_and_FTT();
2290 rs1 = GET_FIELD(insn, 13, 17);
2291 rs2 = GET_FIELD(insn, 27, 31);
2292 xop = GET_FIELD(insn, 18, 26);
2293 switch (xop) {
2294 case 0x1: /* fmovs */
2295 gen_op_load_fpr_FT0(rs2);
2296 gen_op_store_FT0_fpr(rd);
2297 break;
2298 case 0x5: /* fnegs */
2299 gen_op_load_fpr_FT1(rs2);
2300 gen_op_fnegs();
2301 gen_op_store_FT0_fpr(rd);
2302 break;
2303 case 0x9: /* fabss */
2304 gen_op_load_fpr_FT1(rs2);
2305 tcg_gen_helper_0_0(helper_fabss);
2306 gen_op_store_FT0_fpr(rd);
2307 break;
2308 case 0x29: /* fsqrts */
2309 gen_op_load_fpr_FT1(rs2);
2310 gen_clear_float_exceptions();
2311 tcg_gen_helper_0_0(helper_fsqrts);
2312 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2313 gen_op_store_FT0_fpr(rd);
2314 break;
2315 case 0x2a: /* fsqrtd */
2316 gen_op_load_fpr_DT1(DFPREG(rs2));
2317 gen_clear_float_exceptions();
2318 tcg_gen_helper_0_0(helper_fsqrtd);
2319 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2320 gen_op_store_DT0_fpr(DFPREG(rd));
2321 break;
2322 case 0x2b: /* fsqrtq */
2323 #if defined(CONFIG_USER_ONLY)
2324 gen_op_load_fpr_QT1(QFPREG(rs2));
2325 gen_clear_float_exceptions();
2326 tcg_gen_helper_0_0(helper_fsqrtq);
2327 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2328 gen_op_store_QT0_fpr(QFPREG(rd));
2329 break;
2330 #else
2331 goto nfpu_insn;
2332 #endif
2333 case 0x41:
2334 gen_op_load_fpr_FT0(rs1);
2335 gen_op_load_fpr_FT1(rs2);
2336 gen_clear_float_exceptions();
2337 gen_op_fadds();
2338 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2339 gen_op_store_FT0_fpr(rd);
2340 break;
2341 case 0x42:
2342 gen_op_load_fpr_DT0(DFPREG(rs1));
2343 gen_op_load_fpr_DT1(DFPREG(rs2));
2344 gen_clear_float_exceptions();
2345 gen_op_faddd();
2346 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2347 gen_op_store_DT0_fpr(DFPREG(rd));
2348 break;
2349 case 0x43: /* faddq */
2350 #if defined(CONFIG_USER_ONLY)
2351 gen_op_load_fpr_QT0(QFPREG(rs1));
2352 gen_op_load_fpr_QT1(QFPREG(rs2));
2353 gen_clear_float_exceptions();
2354 gen_op_faddq();
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2356 gen_op_store_QT0_fpr(QFPREG(rd));
2357 break;
2358 #else
2359 goto nfpu_insn;
2360 #endif
2361 case 0x45:
2362 gen_op_load_fpr_FT0(rs1);
2363 gen_op_load_fpr_FT1(rs2);
2364 gen_clear_float_exceptions();
2365 gen_op_fsubs();
2366 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2367 gen_op_store_FT0_fpr(rd);
2368 break;
2369 case 0x46:
2370 gen_op_load_fpr_DT0(DFPREG(rs1));
2371 gen_op_load_fpr_DT1(DFPREG(rs2));
2372 gen_clear_float_exceptions();
2373 gen_op_fsubd();
2374 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2375 gen_op_store_DT0_fpr(DFPREG(rd));
2376 break;
2377 case 0x47: /* fsubq */
2378 #if defined(CONFIG_USER_ONLY)
2379 gen_op_load_fpr_QT0(QFPREG(rs1));
2380 gen_op_load_fpr_QT1(QFPREG(rs2));
2381 gen_clear_float_exceptions();
2382 gen_op_fsubq();
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 gen_op_store_QT0_fpr(QFPREG(rd));
2385 break;
2386 #else
2387 goto nfpu_insn;
2388 #endif
2389 case 0x49:
2390 gen_op_load_fpr_FT0(rs1);
2391 gen_op_load_fpr_FT1(rs2);
2392 gen_clear_float_exceptions();
2393 gen_op_fmuls();
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2395 gen_op_store_FT0_fpr(rd);
2396 break;
2397 case 0x4a:
2398 gen_op_load_fpr_DT0(DFPREG(rs1));
2399 gen_op_load_fpr_DT1(DFPREG(rs2));
2400 gen_clear_float_exceptions();
2401 gen_op_fmuld();
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2403 gen_op_store_DT0_fpr(DFPREG(rd));
2404 break;
2405 case 0x4b: /* fmulq */
2406 #if defined(CONFIG_USER_ONLY)
2407 gen_op_load_fpr_QT0(QFPREG(rs1));
2408 gen_op_load_fpr_QT1(QFPREG(rs2));
2409 gen_clear_float_exceptions();
2410 gen_op_fmulq();
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2412 gen_op_store_QT0_fpr(QFPREG(rd));
2413 break;
2414 #else
2415 goto nfpu_insn;
2416 #endif
2417 case 0x4d:
2418 gen_op_load_fpr_FT0(rs1);
2419 gen_op_load_fpr_FT1(rs2);
2420 gen_clear_float_exceptions();
2421 gen_op_fdivs();
2422 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2423 gen_op_store_FT0_fpr(rd);
2424 break;
2425 case 0x4e:
2426 gen_op_load_fpr_DT0(DFPREG(rs1));
2427 gen_op_load_fpr_DT1(DFPREG(rs2));
2428 gen_clear_float_exceptions();
2429 gen_op_fdivd();
2430 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2431 gen_op_store_DT0_fpr(DFPREG(rd));
2432 break;
2433 case 0x4f: /* fdivq */
2434 #if defined(CONFIG_USER_ONLY)
2435 gen_op_load_fpr_QT0(QFPREG(rs1));
2436 gen_op_load_fpr_QT1(QFPREG(rs2));
2437 gen_clear_float_exceptions();
2438 gen_op_fdivq();
2439 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2440 gen_op_store_QT0_fpr(QFPREG(rd));
2441 break;
2442 #else
2443 goto nfpu_insn;
2444 #endif
2445 case 0x69:
2446 gen_op_load_fpr_FT0(rs1);
2447 gen_op_load_fpr_FT1(rs2);
2448 gen_clear_float_exceptions();
2449 gen_op_fsmuld();
2450 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2451 gen_op_store_DT0_fpr(DFPREG(rd));
2452 break;
2453 case 0x6e: /* fdmulq */
2454 #if defined(CONFIG_USER_ONLY)
2455 gen_op_load_fpr_DT0(DFPREG(rs1));
2456 gen_op_load_fpr_DT1(DFPREG(rs2));
2457 gen_clear_float_exceptions();
2458 gen_op_fdmulq();
2459 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2460 gen_op_store_QT0_fpr(QFPREG(rd));
2461 break;
2462 #else
2463 goto nfpu_insn;
2464 #endif
2465 case 0xc4:
2466 gen_op_load_fpr_FT1(rs2);
2467 gen_clear_float_exceptions();
2468 gen_op_fitos();
2469 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2470 gen_op_store_FT0_fpr(rd);
2471 break;
2472 case 0xc6:
2473 gen_op_load_fpr_DT1(DFPREG(rs2));
2474 gen_clear_float_exceptions();
2475 gen_op_fdtos();
2476 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2477 gen_op_store_FT0_fpr(rd);
2478 break;
2479 case 0xc7: /* fqtos */
2480 #if defined(CONFIG_USER_ONLY)
2481 gen_op_load_fpr_QT1(QFPREG(rs2));
2482 gen_clear_float_exceptions();
2483 gen_op_fqtos();
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 gen_op_store_FT0_fpr(rd);
2486 break;
2487 #else
2488 goto nfpu_insn;
2489 #endif
2490 case 0xc8:
2491 gen_op_load_fpr_FT1(rs2);
2492 gen_op_fitod();
2493 gen_op_store_DT0_fpr(DFPREG(rd));
2494 break;
2495 case 0xc9:
2496 gen_op_load_fpr_FT1(rs2);
2497 gen_op_fstod();
2498 gen_op_store_DT0_fpr(DFPREG(rd));
2499 break;
2500 case 0xcb: /* fqtod */
2501 #if defined(CONFIG_USER_ONLY)
2502 gen_op_load_fpr_QT1(QFPREG(rs2));
2503 gen_clear_float_exceptions();
2504 gen_op_fqtod();
2505 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2506 gen_op_store_DT0_fpr(DFPREG(rd));
2507 break;
2508 #else
2509 goto nfpu_insn;
2510 #endif
2511 case 0xcc: /* fitoq */
2512 #if defined(CONFIG_USER_ONLY)
2513 gen_op_load_fpr_FT1(rs2);
2514 gen_op_fitoq();
2515 gen_op_store_QT0_fpr(QFPREG(rd));
2516 break;
2517 #else
2518 goto nfpu_insn;
2519 #endif
2520 case 0xcd: /* fstoq */
2521 #if defined(CONFIG_USER_ONLY)
2522 gen_op_load_fpr_FT1(rs2);
2523 gen_op_fstoq();
2524 gen_op_store_QT0_fpr(QFPREG(rd));
2525 break;
2526 #else
2527 goto nfpu_insn;
2528 #endif
2529 case 0xce: /* fdtoq */
2530 #if defined(CONFIG_USER_ONLY)
2531 gen_op_load_fpr_DT1(DFPREG(rs2));
2532 gen_op_fdtoq();
2533 gen_op_store_QT0_fpr(QFPREG(rd));
2534 break;
2535 #else
2536 goto nfpu_insn;
2537 #endif
2538 case 0xd1:
2539 gen_op_load_fpr_FT1(rs2);
2540 gen_clear_float_exceptions();
2541 gen_op_fstoi();
2542 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2543 gen_op_store_FT0_fpr(rd);
2544 break;
2545 case 0xd2:
2546 gen_op_load_fpr_DT1(DFPREG(rs2));
2547 gen_clear_float_exceptions();
2548 gen_op_fdtoi();
2549 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2550 gen_op_store_FT0_fpr(rd);
2551 break;
2552 case 0xd3: /* fqtoi */
2553 #if defined(CONFIG_USER_ONLY)
2554 gen_op_load_fpr_QT1(QFPREG(rs2));
2555 gen_clear_float_exceptions();
2556 gen_op_fqtoi();
2557 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2558 gen_op_store_FT0_fpr(rd);
2559 break;
2560 #else
2561 goto nfpu_insn;
2562 #endif
2563 #ifdef TARGET_SPARC64
2564 case 0x2: /* V9 fmovd */
2565 gen_op_load_fpr_DT0(DFPREG(rs2));
2566 gen_op_store_DT0_fpr(DFPREG(rd));
2567 break;
2568 case 0x3: /* V9 fmovq */
2569 #if defined(CONFIG_USER_ONLY)
2570 gen_op_load_fpr_QT0(QFPREG(rs2));
2571 gen_op_store_QT0_fpr(QFPREG(rd));
2572 break;
2573 #else
2574 goto nfpu_insn;
2575 #endif
2576 case 0x6: /* V9 fnegd */
2577 gen_op_load_fpr_DT1(DFPREG(rs2));
2578 gen_op_fnegd();
2579 gen_op_store_DT0_fpr(DFPREG(rd));
2580 break;
2581 case 0x7: /* V9 fnegq */
2582 #if defined(CONFIG_USER_ONLY)
2583 gen_op_load_fpr_QT1(QFPREG(rs2));
2584 gen_op_fnegq();
2585 gen_op_store_QT0_fpr(QFPREG(rd));
2586 break;
2587 #else
2588 goto nfpu_insn;
2589 #endif
2590 case 0xa: /* V9 fabsd */
2591 gen_op_load_fpr_DT1(DFPREG(rs2));
2592 tcg_gen_helper_0_0(helper_fabsd);
2593 gen_op_store_DT0_fpr(DFPREG(rd));
2594 break;
2595 case 0xb: /* V9 fabsq */
2596 #if defined(CONFIG_USER_ONLY)
2597 gen_op_load_fpr_QT1(QFPREG(rs2));
2598 tcg_gen_helper_0_0(helper_fabsq);
2599 gen_op_store_QT0_fpr(QFPREG(rd));
2600 break;
2601 #else
2602 goto nfpu_insn;
2603 #endif
2604 case 0x81: /* V9 fstox */
2605 gen_op_load_fpr_FT1(rs2);
2606 gen_clear_float_exceptions();
2607 gen_op_fstox();
2608 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2609 gen_op_store_DT0_fpr(DFPREG(rd));
2610 break;
2611 case 0x82: /* V9 fdtox */
2612 gen_op_load_fpr_DT1(DFPREG(rs2));
2613 gen_clear_float_exceptions();
2614 gen_op_fdtox();
2615 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2616 gen_op_store_DT0_fpr(DFPREG(rd));
2617 break;
2618 case 0x83: /* V9 fqtox */
2619 #if defined(CONFIG_USER_ONLY)
2620 gen_op_load_fpr_QT1(QFPREG(rs2));
2621 gen_clear_float_exceptions();
2622 gen_op_fqtox();
2623 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2624 gen_op_store_DT0_fpr(DFPREG(rd));
2625 break;
2626 #else
2627 goto nfpu_insn;
2628 #endif
2629 case 0x84: /* V9 fxtos */
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_clear_float_exceptions();
2632 gen_op_fxtos();
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2634 gen_op_store_FT0_fpr(rd);
2635 break;
2636 case 0x88: /* V9 fxtod */
2637 gen_op_load_fpr_DT1(DFPREG(rs2));
2638 gen_clear_float_exceptions();
2639 gen_op_fxtod();
2640 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2642 break;
2643 case 0x8c: /* V9 fxtoq */
2644 #if defined(CONFIG_USER_ONLY)
2645 gen_op_load_fpr_DT1(DFPREG(rs2));
2646 gen_clear_float_exceptions();
2647 gen_op_fxtoq();
2648 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2649 gen_op_store_QT0_fpr(QFPREG(rd));
2650 break;
2651 #else
2652 goto nfpu_insn;
2653 #endif
2654 #endif
2655 default:
2656 goto illegal_insn;
2658 } else if (xop == 0x35) { /* FPU Operations */
2659 #ifdef TARGET_SPARC64
2660 int cond;
2661 #endif
2662 if (gen_trap_ifnofpu(dc))
2663 goto jmp_insn;
2664 gen_op_clear_ieee_excp_and_FTT();
2665 rs1 = GET_FIELD(insn, 13, 17);
2666 rs2 = GET_FIELD(insn, 27, 31);
2667 xop = GET_FIELD(insn, 18, 26);
2668 #ifdef TARGET_SPARC64
2669 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2670 TCGv r_zero;
2671 int l1;
2673 l1 = gen_new_label();
2674 r_zero = tcg_const_tl(0);
2675 cond = GET_FIELD_SP(insn, 14, 17);
2676 rs1 = GET_FIELD(insn, 13, 17);
2677 gen_movl_reg_T0(rs1);
2678 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2679 gen_op_load_fpr_FT0(rs2);
2680 gen_op_store_FT0_fpr(rd);
2681 gen_set_label(l1);
2682 break;
2683 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2684 TCGv r_zero;
2685 int l1;
2687 l1 = gen_new_label();
2688 r_zero = tcg_const_tl(0);
2689 cond = GET_FIELD_SP(insn, 14, 17);
2690 rs1 = GET_FIELD(insn, 13, 17);
2691 gen_movl_reg_T0(rs1);
2692 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2693 gen_op_load_fpr_DT0(DFPREG(rs2));
2694 gen_op_store_DT0_fpr(DFPREG(rd));
2695 gen_set_label(l1);
2696 break;
2697 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2698 #if defined(CONFIG_USER_ONLY)
2699 TCGv r_zero;
2700 int l1;
2702 l1 = gen_new_label();
2703 r_zero = tcg_const_tl(0);
2704 cond = GET_FIELD_SP(insn, 14, 17);
2705 rs1 = GET_FIELD(insn, 13, 17);
2706 gen_movl_reg_T0(rs1);
2707 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2708 gen_op_load_fpr_QT0(QFPREG(rs2));
2709 gen_op_store_QT0_fpr(QFPREG(rd));
2710 gen_set_label(l1);
2711 break;
2712 #else
2713 goto nfpu_insn;
2714 #endif
2716 #endif
2717 switch (xop) {
2718 #ifdef TARGET_SPARC64
2719 #define FMOVCC(size_FDQ, fcc) \
2721 TCGv r_zero, r_cond; \
2722 int l1; \
2724 l1 = gen_new_label(); \
2725 r_zero = tcg_const_tl(0); \
2726 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2727 cond = GET_FIELD_SP(insn, 14, 17); \
2728 gen_fcond(r_cond, fcc, cond); \
2729 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2730 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2731 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2732 gen_set_label(l1); \
2734 case 0x001: /* V9 fmovscc %fcc0 */
2735 FMOVCC(F, 0);
2736 break;
2737 case 0x002: /* V9 fmovdcc %fcc0 */
2738 FMOVCC(D, 0);
2739 break;
2740 case 0x003: /* V9 fmovqcc %fcc0 */
2741 #if defined(CONFIG_USER_ONLY)
2742 FMOVCC(Q, 0);
2743 break;
2744 #else
2745 goto nfpu_insn;
2746 #endif
2747 case 0x041: /* V9 fmovscc %fcc1 */
2748 FMOVCC(F, 1);
2749 break;
2750 case 0x042: /* V9 fmovdcc %fcc1 */
2751 FMOVCC(D, 1);
2752 break;
2753 case 0x043: /* V9 fmovqcc %fcc1 */
2754 #if defined(CONFIG_USER_ONLY)
2755 FMOVCC(Q, 1);
2756 break;
2757 #else
2758 goto nfpu_insn;
2759 #endif
2760 case 0x081: /* V9 fmovscc %fcc2 */
2761 FMOVCC(F, 2);
2762 break;
2763 case 0x082: /* V9 fmovdcc %fcc2 */
2764 FMOVCC(D, 2);
2765 break;
2766 case 0x083: /* V9 fmovqcc %fcc2 */
2767 #if defined(CONFIG_USER_ONLY)
2768 FMOVCC(Q, 2);
2769 break;
2770 #else
2771 goto nfpu_insn;
2772 #endif
2773 case 0x0c1: /* V9 fmovscc %fcc3 */
2774 FMOVCC(F, 3);
2775 break;
2776 case 0x0c2: /* V9 fmovdcc %fcc3 */
2777 FMOVCC(D, 3);
2778 break;
2779 case 0x0c3: /* V9 fmovqcc %fcc3 */
2780 #if defined(CONFIG_USER_ONLY)
2781 FMOVCC(Q, 3);
2782 break;
2783 #else
2784 goto nfpu_insn;
2785 #endif
2786 #undef FMOVCC
2787 #define FMOVCC(size_FDQ, icc) \
2789 TCGv r_zero, r_cond; \
2790 int l1; \
2792 l1 = gen_new_label(); \
2793 r_zero = tcg_const_tl(0); \
2794 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2795 cond = GET_FIELD_SP(insn, 14, 17); \
2796 gen_cond(r_cond, icc, cond); \
2797 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2798 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2799 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2800 gen_set_label(l1); \
2803 case 0x101: /* V9 fmovscc %icc */
2804 FMOVCC(F, 0);
2805 break;
2806 case 0x102: /* V9 fmovdcc %icc */
2807 FMOVCC(D, 0);
2808 case 0x103: /* V9 fmovqcc %icc */
2809 #if defined(CONFIG_USER_ONLY)
2810 FMOVCC(D, 0);
2811 break;
2812 #else
2813 goto nfpu_insn;
2814 #endif
2815 case 0x181: /* V9 fmovscc %xcc */
2816 FMOVCC(F, 1);
2817 break;
2818 case 0x182: /* V9 fmovdcc %xcc */
2819 FMOVCC(D, 1);
2820 break;
2821 case 0x183: /* V9 fmovqcc %xcc */
2822 #if defined(CONFIG_USER_ONLY)
2823 FMOVCC(Q, 1);
2824 break;
2825 #else
2826 goto nfpu_insn;
2827 #endif
2828 #undef FMOVCC
2829 #endif
2830 case 0x51: /* fcmps, V9 %fcc */
2831 gen_op_load_fpr_FT0(rs1);
2832 gen_op_load_fpr_FT1(rs2);
2833 gen_op_fcmps(rd & 3);
2834 break;
2835 case 0x52: /* fcmpd, V9 %fcc */
2836 gen_op_load_fpr_DT0(DFPREG(rs1));
2837 gen_op_load_fpr_DT1(DFPREG(rs2));
2838 gen_op_fcmpd(rd & 3);
2839 break;
2840 case 0x53: /* fcmpq, V9 %fcc */
2841 #if defined(CONFIG_USER_ONLY)
2842 gen_op_load_fpr_QT0(QFPREG(rs1));
2843 gen_op_load_fpr_QT1(QFPREG(rs2));
2844 gen_op_fcmpq(rd & 3);
2845 break;
2846 #else /* !defined(CONFIG_USER_ONLY) */
2847 goto nfpu_insn;
2848 #endif
2849 case 0x55: /* fcmpes, V9 %fcc */
2850 gen_op_load_fpr_FT0(rs1);
2851 gen_op_load_fpr_FT1(rs2);
2852 gen_op_fcmpes(rd & 3);
2853 break;
2854 case 0x56: /* fcmped, V9 %fcc */
2855 gen_op_load_fpr_DT0(DFPREG(rs1));
2856 gen_op_load_fpr_DT1(DFPREG(rs2));
2857 gen_op_fcmped(rd & 3);
2858 break;
2859 case 0x57: /* fcmpeq, V9 %fcc */
2860 #if defined(CONFIG_USER_ONLY)
2861 gen_op_load_fpr_QT0(QFPREG(rs1));
2862 gen_op_load_fpr_QT1(QFPREG(rs2));
2863 gen_op_fcmpeq(rd & 3);
2864 break;
2865 #else/* !defined(CONFIG_USER_ONLY) */
2866 goto nfpu_insn;
2867 #endif
2868 default:
2869 goto illegal_insn;
2871 #if defined(OPTIM)
2872 } else if (xop == 0x2) {
2873 // clr/mov shortcut
2875 rs1 = GET_FIELD(insn, 13, 17);
2876 if (rs1 == 0) {
2877 // or %g0, x, y -> mov T0, x; mov y, T0
2878 if (IS_IMM) { /* immediate */
2879 rs2 = GET_FIELDs(insn, 19, 31);
2880 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2881 } else { /* register */
2882 rs2 = GET_FIELD(insn, 27, 31);
2883 gen_movl_reg_T0(rs2);
2885 } else {
2886 gen_movl_reg_T0(rs1);
2887 if (IS_IMM) { /* immediate */
2888 rs2 = GET_FIELDs(insn, 19, 31);
2889 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2890 } else { /* register */
2891 // or x, %g0, y -> mov T1, x; mov y, T1
2892 rs2 = GET_FIELD(insn, 27, 31);
2893 if (rs2 != 0) {
2894 gen_movl_reg_T1(rs2);
2895 gen_op_or_T1_T0();
2899 gen_movl_T0_reg(rd);
2900 #endif
2901 #ifdef TARGET_SPARC64
2902 } else if (xop == 0x25) { /* sll, V9 sllx */
2903 rs1 = GET_FIELD(insn, 13, 17);
2904 gen_movl_reg_T0(rs1);
2905 if (IS_IMM) { /* immediate */
2906 rs2 = GET_FIELDs(insn, 20, 31);
2907 if (insn & (1 << 12)) {
2908 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2909 } else {
2910 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2911 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2913 } else { /* register */
2914 rs2 = GET_FIELD(insn, 27, 31);
2915 gen_movl_reg_T1(rs2);
2916 if (insn & (1 << 12)) {
2917 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2918 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2919 } else {
2920 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2921 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2922 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2925 gen_movl_T0_reg(rd);
2926 } else if (xop == 0x26) { /* srl, V9 srlx */
2927 rs1 = GET_FIELD(insn, 13, 17);
2928 gen_movl_reg_T0(rs1);
2929 if (IS_IMM) { /* immediate */
2930 rs2 = GET_FIELDs(insn, 20, 31);
2931 if (insn & (1 << 12)) {
2932 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2933 } else {
2934 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2935 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2937 } else { /* register */
2938 rs2 = GET_FIELD(insn, 27, 31);
2939 gen_movl_reg_T1(rs2);
2940 if (insn & (1 << 12)) {
2941 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2942 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2943 } else {
2944 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2945 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2946 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2949 gen_movl_T0_reg(rd);
2950 } else if (xop == 0x27) { /* sra, V9 srax */
2951 rs1 = GET_FIELD(insn, 13, 17);
2952 gen_movl_reg_T0(rs1);
2953 if (IS_IMM) { /* immediate */
2954 rs2 = GET_FIELDs(insn, 20, 31);
2955 if (insn & (1 << 12)) {
2956 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2957 } else {
2958 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2959 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2960 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2962 } else { /* register */
2963 rs2 = GET_FIELD(insn, 27, 31);
2964 gen_movl_reg_T1(rs2);
2965 if (insn & (1 << 12)) {
2966 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2967 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2968 } else {
2969 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2970 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2971 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2974 gen_movl_T0_reg(rd);
2975 #endif
2976 } else if (xop < 0x36) {
2977 rs1 = GET_FIELD(insn, 13, 17);
2978 gen_movl_reg_T0(rs1);
2979 if (IS_IMM) { /* immediate */
2980 rs2 = GET_FIELDs(insn, 19, 31);
2981 gen_movl_simm_T1(rs2);
2982 } else { /* register */
2983 rs2 = GET_FIELD(insn, 27, 31);
2984 gen_movl_reg_T1(rs2);
2986 if (xop < 0x20) {
2987 switch (xop & ~0x10) {
2988 case 0x0:
2989 if (xop & 0x10)
2990 gen_op_add_T1_T0_cc();
2991 else
2992 gen_op_add_T1_T0();
2993 break;
2994 case 0x1:
2995 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2996 if (xop & 0x10)
2997 gen_op_logic_T0_cc();
2998 break;
2999 case 0x2:
3000 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3001 if (xop & 0x10)
3002 gen_op_logic_T0_cc();
3003 break;
3004 case 0x3:
3005 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3006 if (xop & 0x10)
3007 gen_op_logic_T0_cc();
3008 break;
3009 case 0x4:
3010 if (xop & 0x10)
3011 gen_op_sub_T1_T0_cc();
3012 else
3013 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3014 break;
3015 case 0x5:
3016 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3017 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3018 if (xop & 0x10)
3019 gen_op_logic_T0_cc();
3020 break;
3021 case 0x6:
3022 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3023 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3024 if (xop & 0x10)
3025 gen_op_logic_T0_cc();
3026 break;
3027 case 0x7:
3028 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3029 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3030 if (xop & 0x10)
3031 gen_op_logic_T0_cc();
3032 break;
3033 case 0x8:
3034 if (xop & 0x10)
3035 gen_op_addx_T1_T0_cc();
3036 else {
3037 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3038 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3039 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3041 break;
3042 #ifdef TARGET_SPARC64
3043 case 0x9: /* V9 mulx */
3044 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3045 break;
3046 #endif
3047 case 0xa:
3048 gen_op_umul_T1_T0();
3049 if (xop & 0x10)
3050 gen_op_logic_T0_cc();
3051 break;
3052 case 0xb:
3053 gen_op_smul_T1_T0();
3054 if (xop & 0x10)
3055 gen_op_logic_T0_cc();
3056 break;
3057 case 0xc:
3058 if (xop & 0x10)
3059 gen_op_subx_T1_T0_cc();
3060 else {
3061 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3062 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3063 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3065 break;
3066 #ifdef TARGET_SPARC64
3067 case 0xd: /* V9 udivx */
3068 gen_trap_ifdivzero_i64(cpu_T[1]);
3069 tcg_gen_divu_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3070 break;
3071 #endif
3072 case 0xe:
3073 gen_op_udiv_T1_T0();
3074 if (xop & 0x10)
3075 gen_op_div_cc();
3076 break;
3077 case 0xf:
3078 gen_op_sdiv_T1_T0();
3079 if (xop & 0x10)
3080 gen_op_div_cc();
3081 break;
3082 default:
3083 goto illegal_insn;
3085 gen_movl_T0_reg(rd);
3086 } else {
3087 switch (xop) {
3088 case 0x20: /* taddcc */
3089 gen_op_tadd_T1_T0_cc();
3090 gen_movl_T0_reg(rd);
3091 break;
3092 case 0x21: /* tsubcc */
3093 gen_op_tsub_T1_T0_cc();
3094 gen_movl_T0_reg(rd);
3095 break;
3096 case 0x22: /* taddcctv */
3097 save_state(dc);
3098 gen_op_tadd_T1_T0_ccTV();
3099 gen_movl_T0_reg(rd);
3100 break;
3101 case 0x23: /* tsubcctv */
3102 save_state(dc);
3103 gen_op_tsub_T1_T0_ccTV();
3104 gen_movl_T0_reg(rd);
3105 break;
3106 case 0x24: /* mulscc */
3107 gen_op_mulscc_T1_T0();
3108 gen_movl_T0_reg(rd);
3109 break;
3110 #ifndef TARGET_SPARC64
3111 case 0x25: /* sll */
3112 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3113 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3114 gen_movl_T0_reg(rd);
3115 break;
3116 case 0x26: /* srl */
3117 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3118 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3119 gen_movl_T0_reg(rd);
3120 break;
3121 case 0x27: /* sra */
3122 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3123 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3124 gen_movl_T0_reg(rd);
3125 break;
3126 #endif
3127 case 0x30:
3129 switch(rd) {
3130 case 0: /* wry */
3131 gen_op_xor_T1_T0();
3132 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
3133 break;
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3136 SPARCv8 manual, nop
3137 on the microSPARC
3138 II */
3139 case 0x10 ... 0x1f: /* implementation-dependent
3140 in the SPARCv8
3141 manual, nop on the
3142 microSPARC II */
3143 break;
3144 #else
3145 case 0x2: /* V9 wrccr */
3146 gen_op_xor_T1_T0();
3147 gen_op_wrccr();
3148 break;
3149 case 0x3: /* V9 wrasi */
3150 gen_op_xor_T1_T0();
3151 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
3152 break;
3153 case 0x6: /* V9 wrfprs */
3154 gen_op_xor_T1_T0();
3155 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
3156 save_state(dc);
3157 gen_op_next_insn();
3158 tcg_gen_exit_tb(0);
3159 dc->is_br = 1;
3160 break;
3161 case 0xf: /* V9 sir, nop if user */
3162 #if !defined(CONFIG_USER_ONLY)
3163 if (supervisor(dc))
3164 ; // XXX
3165 #endif
3166 break;
3167 case 0x13: /* Graphics Status */
3168 if (gen_trap_ifnofpu(dc))
3169 goto jmp_insn;
3170 gen_op_xor_T1_T0();
3171 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
3172 break;
3173 case 0x17: /* Tick compare */
3174 #if !defined(CONFIG_USER_ONLY)
3175 if (!supervisor(dc))
3176 goto illegal_insn;
3177 #endif
3179 TCGv r_tickptr;
3181 gen_op_xor_T1_T0();
3182 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3183 tick_cmpr));
3184 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3185 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3186 offsetof(CPUState, tick));
3187 tcg_gen_helper_0_2(helper_tick_set_limit,
3188 r_tickptr, cpu_T[0]);
3190 break;
3191 case 0x18: /* System tick */
3192 #if !defined(CONFIG_USER_ONLY)
3193 if (!supervisor(dc))
3194 goto illegal_insn;
3195 #endif
3197 TCGv r_tickptr;
3199 gen_op_xor_T1_T0();
3200 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3201 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3202 offsetof(CPUState, stick));
3203 tcg_gen_helper_0_2(helper_tick_set_count,
3204 r_tickptr, cpu_T[0]);
3206 break;
3207 case 0x19: /* System tick compare */
3208 #if !defined(CONFIG_USER_ONLY)
3209 if (!supervisor(dc))
3210 goto illegal_insn;
3211 #endif
3213 TCGv r_tickptr;
3215 gen_op_xor_T1_T0();
3216 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3217 stick_cmpr));
3218 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3219 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3220 offsetof(CPUState, stick));
3221 tcg_gen_helper_0_2(helper_tick_set_limit,
3222 r_tickptr, cpu_T[0]);
3224 break;
3226 case 0x10: /* Performance Control */
3227 case 0x11: /* Performance Instrumentation Counter */
3228 case 0x12: /* Dispatch Control */
3229 case 0x14: /* Softint set */
3230 case 0x15: /* Softint clear */
3231 case 0x16: /* Softint write */
3232 #endif
3233 default:
3234 goto illegal_insn;
3237 break;
3238 #if !defined(CONFIG_USER_ONLY)
3239 case 0x31: /* wrpsr, V9 saved, restored */
3241 if (!supervisor(dc))
3242 goto priv_insn;
3243 #ifdef TARGET_SPARC64
3244 switch (rd) {
3245 case 0:
3246 gen_op_saved();
3247 break;
3248 case 1:
3249 gen_op_restored();
3250 break;
3251 case 2: /* UA2005 allclean */
3252 case 3: /* UA2005 otherw */
3253 case 4: /* UA2005 normalw */
3254 case 5: /* UA2005 invalw */
3255 // XXX
3256 default:
3257 goto illegal_insn;
3259 #else
3260 gen_op_xor_T1_T0();
3261 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
3262 save_state(dc);
3263 gen_op_next_insn();
3264 tcg_gen_exit_tb(0);
3265 dc->is_br = 1;
3266 #endif
3268 break;
3269 case 0x32: /* wrwim, V9 wrpr */
3271 if (!supervisor(dc))
3272 goto priv_insn;
3273 gen_op_xor_T1_T0();
3274 #ifdef TARGET_SPARC64
3275 switch (rd) {
3276 case 0: // tpc
3278 TCGv r_tsptr;
3280 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3281 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3282 offsetof(CPUState, tsptr));
3283 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3284 offsetof(trap_state, tpc));
3286 break;
3287 case 1: // tnpc
3289 TCGv r_tsptr;
3291 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3292 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3293 offsetof(CPUState, tsptr));
3294 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3295 offsetof(trap_state, tnpc));
3297 break;
3298 case 2: // tstate
3300 TCGv r_tsptr;
3302 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3303 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3304 offsetof(CPUState, tsptr));
3305 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3306 offsetof(trap_state, tstate));
3308 break;
3309 case 3: // tt
3311 TCGv r_tsptr;
3313 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3314 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3315 offsetof(CPUState, tsptr));
3316 tcg_gen_st_i32(cpu_T[0], r_tsptr,
3317 offsetof(trap_state, tt));
3319 break;
3320 case 4: // tick
3322 TCGv r_tickptr;
3324 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3325 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3326 offsetof(CPUState, tick));
3327 tcg_gen_helper_0_2(helper_tick_set_count,
3328 r_tickptr, cpu_T[0]);
3330 break;
3331 case 5: // tba
3332 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3333 break;
3334 case 6: // pstate
3335 save_state(dc);
3336 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
3337 gen_op_next_insn();
3338 tcg_gen_exit_tb(0);
3339 dc->is_br = 1;
3340 break;
3341 case 7: // tl
3342 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
3343 break;
3344 case 8: // pil
3345 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
3346 break;
3347 case 9: // cwp
3348 gen_op_wrcwp();
3349 break;
3350 case 10: // cansave
3351 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
3352 break;
3353 case 11: // canrestore
3354 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
3355 break;
3356 case 12: // cleanwin
3357 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
3358 break;
3359 case 13: // otherwin
3360 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
3361 break;
3362 case 14: // wstate
3363 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
3364 break;
3365 case 16: // UA2005 gl
3366 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
3367 break;
3368 case 26: // UA2005 strand status
3369 if (!hypervisor(dc))
3370 goto priv_insn;
3371 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
3372 break;
3373 default:
3374 goto illegal_insn;
3376 #else
3377 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
3378 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
3379 #endif
3381 break;
3382 case 0x33: /* wrtbr, UA2005 wrhpr */
3384 #ifndef TARGET_SPARC64
3385 if (!supervisor(dc))
3386 goto priv_insn;
3387 gen_op_xor_T1_T0();
3388 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3389 #else
3390 if (!hypervisor(dc))
3391 goto priv_insn;
3392 gen_op_xor_T1_T0();
3393 switch (rd) {
3394 case 0: // hpstate
3395 // XXX gen_op_wrhpstate();
3396 save_state(dc);
3397 gen_op_next_insn();
3398 tcg_gen_exit_tb(0);
3399 dc->is_br = 1;
3400 break;
3401 case 1: // htstate
3402 // XXX gen_op_wrhtstate();
3403 break;
3404 case 3: // hintp
3405 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
3406 break;
3407 case 5: // htba
3408 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
3409 break;
3410 case 31: // hstick_cmpr
3412 TCGv r_tickptr;
3414 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3415 hstick_cmpr));
3416 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3417 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3418 offsetof(CPUState, hstick));
3419 tcg_gen_helper_0_2(helper_tick_set_limit,
3420 r_tickptr, cpu_T[0]);
3422 break;
3423 case 6: // hver readonly
3424 default:
3425 goto illegal_insn;
3427 #endif
3429 break;
3430 #endif
3431 #ifdef TARGET_SPARC64
3432 case 0x2c: /* V9 movcc */
3434 int cc = GET_FIELD_SP(insn, 11, 12);
3435 int cond = GET_FIELD_SP(insn, 14, 17);
3436 TCGv r_cond;
3437 int l1;
3439 r_cond = tcg_temp_new(TCG_TYPE_TL);
3440 if (insn & (1 << 18)) {
3441 if (cc == 0)
3442 gen_cond(r_cond, 0, cond);
3443 else if (cc == 2)
3444 gen_cond(r_cond, 1, cond);
3445 else
3446 goto illegal_insn;
3447 } else {
3448 gen_fcond(r_cond, cc, cond);
3451 l1 = gen_new_label();
3453 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,
3454 tcg_const_tl(0), l1);
3455 if (IS_IMM) { /* immediate */
3456 rs2 = GET_FIELD_SPs(insn, 0, 10);
3457 gen_movl_simm_T1(rs2);
3458 } else {
3459 rs2 = GET_FIELD_SP(insn, 0, 4);
3460 gen_movl_reg_T1(rs2);
3462 gen_movl_T1_reg(rd);
3463 gen_set_label(l1);
3464 break;
3466 case 0x2d: /* V9 sdivx */
3467 gen_op_sdivx_T1_T0();
3468 gen_movl_T0_reg(rd);
3469 break;
3470 case 0x2e: /* V9 popc */
3472 if (IS_IMM) { /* immediate */
3473 rs2 = GET_FIELD_SPs(insn, 0, 12);
3474 gen_movl_simm_T1(rs2);
3475 // XXX optimize: popc(constant)
3477 else {
3478 rs2 = GET_FIELD_SP(insn, 0, 4);
3479 gen_movl_reg_T1(rs2);
3481 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
3482 cpu_T[1]);
3483 gen_movl_T0_reg(rd);
3485 case 0x2f: /* V9 movr */
3487 int cond = GET_FIELD_SP(insn, 10, 12);
3488 TCGv r_zero;
3489 int l1;
3491 rs1 = GET_FIELD(insn, 13, 17);
3492 gen_movl_reg_T0(rs1);
3494 l1 = gen_new_label();
3496 r_zero = tcg_const_tl(0);
3497 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
3498 if (IS_IMM) { /* immediate */
3499 rs2 = GET_FIELD_SPs(insn, 0, 9);
3500 gen_movl_simm_T1(rs2);
3501 } else {
3502 rs2 = GET_FIELD_SP(insn, 0, 4);
3503 gen_movl_reg_T1(rs2);
3505 gen_movl_T1_reg(rd);
3506 gen_set_label(l1);
3507 break;
3509 #endif
3510 default:
3511 goto illegal_insn;
3514 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3515 #ifdef TARGET_SPARC64
3516 int opf = GET_FIELD_SP(insn, 5, 13);
3517 rs1 = GET_FIELD(insn, 13, 17);
3518 rs2 = GET_FIELD(insn, 27, 31);
3519 if (gen_trap_ifnofpu(dc))
3520 goto jmp_insn;
3522 switch (opf) {
3523 case 0x000: /* VIS I edge8cc */
3524 case 0x001: /* VIS II edge8n */
3525 case 0x002: /* VIS I edge8lcc */
3526 case 0x003: /* VIS II edge8ln */
3527 case 0x004: /* VIS I edge16cc */
3528 case 0x005: /* VIS II edge16n */
3529 case 0x006: /* VIS I edge16lcc */
3530 case 0x007: /* VIS II edge16ln */
3531 case 0x008: /* VIS I edge32cc */
3532 case 0x009: /* VIS II edge32n */
3533 case 0x00a: /* VIS I edge32lcc */
3534 case 0x00b: /* VIS II edge32ln */
3535 // XXX
3536 goto illegal_insn;
3537 case 0x010: /* VIS I array8 */
3538 gen_movl_reg_T0(rs1);
3539 gen_movl_reg_T1(rs2);
3540 gen_op_array8();
3541 gen_movl_T0_reg(rd);
3542 break;
3543 case 0x012: /* VIS I array16 */
3544 gen_movl_reg_T0(rs1);
3545 gen_movl_reg_T1(rs2);
3546 gen_op_array16();
3547 gen_movl_T0_reg(rd);
3548 break;
3549 case 0x014: /* VIS I array32 */
3550 gen_movl_reg_T0(rs1);
3551 gen_movl_reg_T1(rs2);
3552 gen_op_array32();
3553 gen_movl_T0_reg(rd);
3554 break;
3555 case 0x018: /* VIS I alignaddr */
3556 gen_movl_reg_T0(rs1);
3557 gen_movl_reg_T1(rs2);
3558 gen_op_alignaddr();
3559 gen_movl_T0_reg(rd);
3560 break;
3561 case 0x019: /* VIS II bmask */
3562 case 0x01a: /* VIS I alignaddrl */
3563 // XXX
3564 goto illegal_insn;
3565 case 0x020: /* VIS I fcmple16 */
3566 gen_op_load_fpr_DT0(DFPREG(rs1));
3567 gen_op_load_fpr_DT1(DFPREG(rs2));
3568 gen_op_fcmple16();
3569 gen_op_store_DT0_fpr(DFPREG(rd));
3570 break;
3571 case 0x022: /* VIS I fcmpne16 */
3572 gen_op_load_fpr_DT0(DFPREG(rs1));
3573 gen_op_load_fpr_DT1(DFPREG(rs2));
3574 gen_op_fcmpne16();
3575 gen_op_store_DT0_fpr(DFPREG(rd));
3576 break;
3577 case 0x024: /* VIS I fcmple32 */
3578 gen_op_load_fpr_DT0(DFPREG(rs1));
3579 gen_op_load_fpr_DT1(DFPREG(rs2));
3580 gen_op_fcmple32();
3581 gen_op_store_DT0_fpr(DFPREG(rd));
3582 break;
3583 case 0x026: /* VIS I fcmpne32 */
3584 gen_op_load_fpr_DT0(DFPREG(rs1));
3585 gen_op_load_fpr_DT1(DFPREG(rs2));
3586 gen_op_fcmpne32();
3587 gen_op_store_DT0_fpr(DFPREG(rd));
3588 break;
3589 case 0x028: /* VIS I fcmpgt16 */
3590 gen_op_load_fpr_DT0(DFPREG(rs1));
3591 gen_op_load_fpr_DT1(DFPREG(rs2));
3592 gen_op_fcmpgt16();
3593 gen_op_store_DT0_fpr(DFPREG(rd));
3594 break;
3595 case 0x02a: /* VIS I fcmpeq16 */
3596 gen_op_load_fpr_DT0(DFPREG(rs1));
3597 gen_op_load_fpr_DT1(DFPREG(rs2));
3598 gen_op_fcmpeq16();
3599 gen_op_store_DT0_fpr(DFPREG(rd));
3600 break;
3601 case 0x02c: /* VIS I fcmpgt32 */
3602 gen_op_load_fpr_DT0(DFPREG(rs1));
3603 gen_op_load_fpr_DT1(DFPREG(rs2));
3604 gen_op_fcmpgt32();
3605 gen_op_store_DT0_fpr(DFPREG(rd));
3606 break;
3607 case 0x02e: /* VIS I fcmpeq32 */
3608 gen_op_load_fpr_DT0(DFPREG(rs1));
3609 gen_op_load_fpr_DT1(DFPREG(rs2));
3610 gen_op_fcmpeq32();
3611 gen_op_store_DT0_fpr(DFPREG(rd));
3612 break;
3613 case 0x031: /* VIS I fmul8x16 */
3614 gen_op_load_fpr_DT0(DFPREG(rs1));
3615 gen_op_load_fpr_DT1(DFPREG(rs2));
3616 gen_op_fmul8x16();
3617 gen_op_store_DT0_fpr(DFPREG(rd));
3618 break;
3619 case 0x033: /* VIS I fmul8x16au */
3620 gen_op_load_fpr_DT0(DFPREG(rs1));
3621 gen_op_load_fpr_DT1(DFPREG(rs2));
3622 gen_op_fmul8x16au();
3623 gen_op_store_DT0_fpr(DFPREG(rd));
3624 break;
3625 case 0x035: /* VIS I fmul8x16al */
3626 gen_op_load_fpr_DT0(DFPREG(rs1));
3627 gen_op_load_fpr_DT1(DFPREG(rs2));
3628 gen_op_fmul8x16al();
3629 gen_op_store_DT0_fpr(DFPREG(rd));
3630 break;
3631 case 0x036: /* VIS I fmul8sux16 */
3632 gen_op_load_fpr_DT0(DFPREG(rs1));
3633 gen_op_load_fpr_DT1(DFPREG(rs2));
3634 gen_op_fmul8sux16();
3635 gen_op_store_DT0_fpr(DFPREG(rd));
3636 break;
3637 case 0x037: /* VIS I fmul8ulx16 */
3638 gen_op_load_fpr_DT0(DFPREG(rs1));
3639 gen_op_load_fpr_DT1(DFPREG(rs2));
3640 gen_op_fmul8ulx16();
3641 gen_op_store_DT0_fpr(DFPREG(rd));
3642 break;
3643 case 0x038: /* VIS I fmuld8sux16 */
3644 gen_op_load_fpr_DT0(DFPREG(rs1));
3645 gen_op_load_fpr_DT1(DFPREG(rs2));
3646 gen_op_fmuld8sux16();
3647 gen_op_store_DT0_fpr(DFPREG(rd));
3648 break;
3649 case 0x039: /* VIS I fmuld8ulx16 */
3650 gen_op_load_fpr_DT0(DFPREG(rs1));
3651 gen_op_load_fpr_DT1(DFPREG(rs2));
3652 gen_op_fmuld8ulx16();
3653 gen_op_store_DT0_fpr(DFPREG(rd));
3654 break;
3655 case 0x03a: /* VIS I fpack32 */
3656 case 0x03b: /* VIS I fpack16 */
3657 case 0x03d: /* VIS I fpackfix */
3658 case 0x03e: /* VIS I pdist */
3659 // XXX
3660 goto illegal_insn;
3661 case 0x048: /* VIS I faligndata */
3662 gen_op_load_fpr_DT0(DFPREG(rs1));
3663 gen_op_load_fpr_DT1(DFPREG(rs2));
3664 gen_op_faligndata();
3665 gen_op_store_DT0_fpr(DFPREG(rd));
3666 break;
3667 case 0x04b: /* VIS I fpmerge */
3668 gen_op_load_fpr_DT0(DFPREG(rs1));
3669 gen_op_load_fpr_DT1(DFPREG(rs2));
3670 gen_op_fpmerge();
3671 gen_op_store_DT0_fpr(DFPREG(rd));
3672 break;
3673 case 0x04c: /* VIS II bshuffle */
3674 // XXX
3675 goto illegal_insn;
3676 case 0x04d: /* VIS I fexpand */
3677 gen_op_load_fpr_DT0(DFPREG(rs1));
3678 gen_op_load_fpr_DT1(DFPREG(rs2));
3679 gen_op_fexpand();
3680 gen_op_store_DT0_fpr(DFPREG(rd));
3681 break;
3682 case 0x050: /* VIS I fpadd16 */
3683 gen_op_load_fpr_DT0(DFPREG(rs1));
3684 gen_op_load_fpr_DT1(DFPREG(rs2));
3685 gen_op_fpadd16();
3686 gen_op_store_DT0_fpr(DFPREG(rd));
3687 break;
3688 case 0x051: /* VIS I fpadd16s */
3689 gen_op_load_fpr_FT0(rs1);
3690 gen_op_load_fpr_FT1(rs2);
3691 gen_op_fpadd16s();
3692 gen_op_store_FT0_fpr(rd);
3693 break;
3694 case 0x052: /* VIS I fpadd32 */
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 gen_op_fpadd32();
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x053: /* VIS I fpadd32s */
3701 gen_op_load_fpr_FT0(rs1);
3702 gen_op_load_fpr_FT1(rs2);
3703 gen_op_fpadd32s();
3704 gen_op_store_FT0_fpr(rd);
3705 break;
3706 case 0x054: /* VIS I fpsub16 */
3707 gen_op_load_fpr_DT0(DFPREG(rs1));
3708 gen_op_load_fpr_DT1(DFPREG(rs2));
3709 gen_op_fpsub16();
3710 gen_op_store_DT0_fpr(DFPREG(rd));
3711 break;
3712 case 0x055: /* VIS I fpsub16s */
3713 gen_op_load_fpr_FT0(rs1);
3714 gen_op_load_fpr_FT1(rs2);
3715 gen_op_fpsub16s();
3716 gen_op_store_FT0_fpr(rd);
3717 break;
3718 case 0x056: /* VIS I fpsub32 */
3719 gen_op_load_fpr_DT0(DFPREG(rs1));
3720 gen_op_load_fpr_DT1(DFPREG(rs2));
3721 gen_op_fpadd32();
3722 gen_op_store_DT0_fpr(DFPREG(rd));
3723 break;
3724 case 0x057: /* VIS I fpsub32s */
3725 gen_op_load_fpr_FT0(rs1);
3726 gen_op_load_fpr_FT1(rs2);
3727 gen_op_fpsub32s();
3728 gen_op_store_FT0_fpr(rd);
3729 break;
3730 case 0x060: /* VIS I fzero */
3731 gen_op_movl_DT0_0();
3732 gen_op_store_DT0_fpr(DFPREG(rd));
3733 break;
3734 case 0x061: /* VIS I fzeros */
3735 gen_op_movl_FT0_0();
3736 gen_op_store_FT0_fpr(rd);
3737 break;
3738 case 0x062: /* VIS I fnor */
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 gen_op_fnor();
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3743 break;
3744 case 0x063: /* VIS I fnors */
3745 gen_op_load_fpr_FT0(rs1);
3746 gen_op_load_fpr_FT1(rs2);
3747 gen_op_fnors();
3748 gen_op_store_FT0_fpr(rd);
3749 break;
3750 case 0x064: /* VIS I fandnot2 */
3751 gen_op_load_fpr_DT1(DFPREG(rs1));
3752 gen_op_load_fpr_DT0(DFPREG(rs2));
3753 gen_op_fandnot();
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x065: /* VIS I fandnot2s */
3757 gen_op_load_fpr_FT1(rs1);
3758 gen_op_load_fpr_FT0(rs2);
3759 gen_op_fandnots();
3760 gen_op_store_FT0_fpr(rd);
3761 break;
3762 case 0x066: /* VIS I fnot2 */
3763 gen_op_load_fpr_DT1(DFPREG(rs2));
3764 gen_op_fnot();
3765 gen_op_store_DT0_fpr(DFPREG(rd));
3766 break;
3767 case 0x067: /* VIS I fnot2s */
3768 gen_op_load_fpr_FT1(rs2);
3769 gen_op_fnot();
3770 gen_op_store_FT0_fpr(rd);
3771 break;
3772 case 0x068: /* VIS I fandnot1 */
3773 gen_op_load_fpr_DT0(DFPREG(rs1));
3774 gen_op_load_fpr_DT1(DFPREG(rs2));
3775 gen_op_fandnot();
3776 gen_op_store_DT0_fpr(DFPREG(rd));
3777 break;
3778 case 0x069: /* VIS I fandnot1s */
3779 gen_op_load_fpr_FT0(rs1);
3780 gen_op_load_fpr_FT1(rs2);
3781 gen_op_fandnots();
3782 gen_op_store_FT0_fpr(rd);
3783 break;
3784 case 0x06a: /* VIS I fnot1 */
3785 gen_op_load_fpr_DT1(DFPREG(rs1));
3786 gen_op_fnot();
3787 gen_op_store_DT0_fpr(DFPREG(rd));
3788 break;
3789 case 0x06b: /* VIS I fnot1s */
3790 gen_op_load_fpr_FT1(rs1);
3791 gen_op_fnot();
3792 gen_op_store_FT0_fpr(rd);
3793 break;
3794 case 0x06c: /* VIS I fxor */
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 gen_op_fxor();
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3799 break;
3800 case 0x06d: /* VIS I fxors */
3801 gen_op_load_fpr_FT0(rs1);
3802 gen_op_load_fpr_FT1(rs2);
3803 gen_op_fxors();
3804 gen_op_store_FT0_fpr(rd);
3805 break;
3806 case 0x06e: /* VIS I fnand */
3807 gen_op_load_fpr_DT0(DFPREG(rs1));
3808 gen_op_load_fpr_DT1(DFPREG(rs2));
3809 gen_op_fnand();
3810 gen_op_store_DT0_fpr(DFPREG(rd));
3811 break;
3812 case 0x06f: /* VIS I fnands */
3813 gen_op_load_fpr_FT0(rs1);
3814 gen_op_load_fpr_FT1(rs2);
3815 gen_op_fnands();
3816 gen_op_store_FT0_fpr(rd);
3817 break;
3818 case 0x070: /* VIS I fand */
3819 gen_op_load_fpr_DT0(DFPREG(rs1));
3820 gen_op_load_fpr_DT1(DFPREG(rs2));
3821 gen_op_fand();
3822 gen_op_store_DT0_fpr(DFPREG(rd));
3823 break;
3824 case 0x071: /* VIS I fands */
3825 gen_op_load_fpr_FT0(rs1);
3826 gen_op_load_fpr_FT1(rs2);
3827 gen_op_fands();
3828 gen_op_store_FT0_fpr(rd);
3829 break;
3830 case 0x072: /* VIS I fxnor */
3831 gen_op_load_fpr_DT0(DFPREG(rs1));
3832 gen_op_load_fpr_DT1(DFPREG(rs2));
3833 gen_op_fxnor();
3834 gen_op_store_DT0_fpr(DFPREG(rd));
3835 break;
3836 case 0x073: /* VIS I fxnors */
3837 gen_op_load_fpr_FT0(rs1);
3838 gen_op_load_fpr_FT1(rs2);
3839 gen_op_fxnors();
3840 gen_op_store_FT0_fpr(rd);
3841 break;
3842 case 0x074: /* VIS I fsrc1 */
3843 gen_op_load_fpr_DT0(DFPREG(rs1));
3844 gen_op_store_DT0_fpr(DFPREG(rd));
3845 break;
3846 case 0x075: /* VIS I fsrc1s */
3847 gen_op_load_fpr_FT0(rs1);
3848 gen_op_store_FT0_fpr(rd);
3849 break;
3850 case 0x076: /* VIS I fornot2 */
3851 gen_op_load_fpr_DT1(DFPREG(rs1));
3852 gen_op_load_fpr_DT0(DFPREG(rs2));
3853 gen_op_fornot();
3854 gen_op_store_DT0_fpr(DFPREG(rd));
3855 break;
3856 case 0x077: /* VIS I fornot2s */
3857 gen_op_load_fpr_FT1(rs1);
3858 gen_op_load_fpr_FT0(rs2);
3859 gen_op_fornots();
3860 gen_op_store_FT0_fpr(rd);
3861 break;
3862 case 0x078: /* VIS I fsrc2 */
3863 gen_op_load_fpr_DT0(DFPREG(rs2));
3864 gen_op_store_DT0_fpr(DFPREG(rd));
3865 break;
3866 case 0x079: /* VIS I fsrc2s */
3867 gen_op_load_fpr_FT0(rs2);
3868 gen_op_store_FT0_fpr(rd);
3869 break;
3870 case 0x07a: /* VIS I fornot1 */
3871 gen_op_load_fpr_DT0(DFPREG(rs1));
3872 gen_op_load_fpr_DT1(DFPREG(rs2));
3873 gen_op_fornot();
3874 gen_op_store_DT0_fpr(DFPREG(rd));
3875 break;
3876 case 0x07b: /* VIS I fornot1s */
3877 gen_op_load_fpr_FT0(rs1);
3878 gen_op_load_fpr_FT1(rs2);
3879 gen_op_fornots();
3880 gen_op_store_FT0_fpr(rd);
3881 break;
3882 case 0x07c: /* VIS I for */
3883 gen_op_load_fpr_DT0(DFPREG(rs1));
3884 gen_op_load_fpr_DT1(DFPREG(rs2));
3885 gen_op_for();
3886 gen_op_store_DT0_fpr(DFPREG(rd));
3887 break;
3888 case 0x07d: /* VIS I fors */
3889 gen_op_load_fpr_FT0(rs1);
3890 gen_op_load_fpr_FT1(rs2);
3891 gen_op_fors();
3892 gen_op_store_FT0_fpr(rd);
3893 break;
3894 case 0x07e: /* VIS I fone */
3895 gen_op_movl_DT0_1();
3896 gen_op_store_DT0_fpr(DFPREG(rd));
3897 break;
3898 case 0x07f: /* VIS I fones */
3899 gen_op_movl_FT0_1();
3900 gen_op_store_FT0_fpr(rd);
3901 break;
3902 case 0x080: /* VIS I shutdown */
3903 case 0x081: /* VIS II siam */
3904 // XXX
3905 goto illegal_insn;
3906 default:
3907 goto illegal_insn;
3909 #else
3910 goto ncp_insn;
3911 #endif
3912 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3913 #ifdef TARGET_SPARC64
3914 goto illegal_insn;
3915 #else
3916 goto ncp_insn;
3917 #endif
3918 #ifdef TARGET_SPARC64
3919 } else if (xop == 0x39) { /* V9 return */
3920 rs1 = GET_FIELD(insn, 13, 17);
3921 save_state(dc);
3922 gen_movl_reg_T0(rs1);
3923 if (IS_IMM) { /* immediate */
3924 rs2 = GET_FIELDs(insn, 19, 31);
3925 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3926 } else { /* register */
3927 rs2 = GET_FIELD(insn, 27, 31);
3928 #if defined(OPTIM)
3929 if (rs2) {
3930 #endif
3931 gen_movl_reg_T1(rs2);
3932 gen_op_add_T1_T0();
3933 #if defined(OPTIM)
3935 #endif
3937 gen_op_restore();
3938 gen_mov_pc_npc(dc);
3939 gen_op_check_align_T0_3();
3940 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3941 dc->npc = DYNAMIC_PC;
3942 goto jmp_insn;
3943 #endif
3944 } else {
3945 rs1 = GET_FIELD(insn, 13, 17);
3946 gen_movl_reg_T0(rs1);
3947 if (IS_IMM) { /* immediate */
3948 rs2 = GET_FIELDs(insn, 19, 31);
3949 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3950 } else { /* register */
3951 rs2 = GET_FIELD(insn, 27, 31);
3952 #if defined(OPTIM)
3953 if (rs2) {
3954 #endif
3955 gen_movl_reg_T1(rs2);
3956 gen_op_add_T1_T0();
3957 #if defined(OPTIM)
3959 #endif
3961 switch (xop) {
3962 case 0x38: /* jmpl */
3964 if (rd != 0) {
3965 tcg_gen_movi_tl(cpu_T[1], dc->pc);
3966 gen_movl_T1_reg(rd);
3968 gen_mov_pc_npc(dc);
3969 gen_op_check_align_T0_3();
3970 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3971 dc->npc = DYNAMIC_PC;
3973 goto jmp_insn;
3974 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3975 case 0x39: /* rett, V9 return */
3977 if (!supervisor(dc))
3978 goto priv_insn;
3979 gen_mov_pc_npc(dc);
3980 gen_op_check_align_T0_3();
3981 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3982 dc->npc = DYNAMIC_PC;
3983 tcg_gen_helper_0_0(helper_rett);
3985 goto jmp_insn;
3986 #endif
3987 case 0x3b: /* flush */
3988 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
3989 break;
3990 case 0x3c: /* save */
3991 save_state(dc);
3992 gen_op_save();
3993 gen_movl_T0_reg(rd);
3994 break;
3995 case 0x3d: /* restore */
3996 save_state(dc);
3997 gen_op_restore();
3998 gen_movl_T0_reg(rd);
3999 break;
4000 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4001 case 0x3e: /* V9 done/retry */
4003 switch (rd) {
4004 case 0:
4005 if (!supervisor(dc))
4006 goto priv_insn;
4007 dc->npc = DYNAMIC_PC;
4008 dc->pc = DYNAMIC_PC;
4009 tcg_gen_helper_0_0(helper_done);
4010 goto jmp_insn;
4011 case 1:
4012 if (!supervisor(dc))
4013 goto priv_insn;
4014 dc->npc = DYNAMIC_PC;
4015 dc->pc = DYNAMIC_PC;
4016 tcg_gen_helper_0_0(helper_retry);
4017 goto jmp_insn;
4018 default:
4019 goto illegal_insn;
4022 break;
4023 #endif
4024 default:
4025 goto illegal_insn;
4028 break;
4030 break;
4031 case 3: /* load/store instructions */
4033 unsigned int xop = GET_FIELD(insn, 7, 12);
4034 rs1 = GET_FIELD(insn, 13, 17);
4035 save_state(dc);
4036 gen_movl_reg_T0(rs1);
4037 if (xop == 0x3c || xop == 0x3e)
4039 rs2 = GET_FIELD(insn, 27, 31);
4040 gen_movl_reg_T1(rs2);
4042 else if (IS_IMM) { /* immediate */
4043 rs2 = GET_FIELDs(insn, 19, 31);
4044 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
4045 } else { /* register */
4046 rs2 = GET_FIELD(insn, 27, 31);
4047 #if defined(OPTIM)
4048 if (rs2 != 0) {
4049 #endif
4050 gen_movl_reg_T1(rs2);
4051 gen_op_add_T1_T0();
4052 #if defined(OPTIM)
4054 #endif
4056 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4057 (xop > 0x17 && xop <= 0x1d ) ||
4058 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4059 switch (xop) {
4060 case 0x0: /* load unsigned word */
4061 gen_op_check_align_T0_3();
4062 ABI32_MASK(cpu_T[0]);
4063 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
4064 break;
4065 case 0x1: /* load unsigned byte */
4066 ABI32_MASK(cpu_T[0]);
4067 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
4068 break;
4069 case 0x2: /* load unsigned halfword */
4070 gen_op_check_align_T0_1();
4071 ABI32_MASK(cpu_T[0]);
4072 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
4073 break;
4074 case 0x3: /* load double word */
4075 if (rd & 1)
4076 goto illegal_insn;
4077 else {
4078 TCGv r_dword;
4080 r_dword = tcg_temp_new(TCG_TYPE_I64);
4081 gen_op_check_align_T0_7();
4082 ABI32_MASK(cpu_T[0]);
4083 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
4084 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
4085 gen_movl_T0_reg(rd + 1);
4086 tcg_gen_shri_i64(r_dword, r_dword, 32);
4087 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
4089 break;
4090 case 0x9: /* load signed byte */
4091 ABI32_MASK(cpu_T[0]);
4092 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4093 break;
4094 case 0xa: /* load signed halfword */
4095 gen_op_check_align_T0_1();
4096 ABI32_MASK(cpu_T[0]);
4097 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
4098 break;
4099 case 0xd: /* ldstub -- XXX: should be atomically */
4100 tcg_gen_movi_i32(cpu_tmp0, 0xff);
4101 ABI32_MASK(cpu_T[0]);
4102 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4103 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
4104 break;
4105 case 0x0f: /* swap register with memory. Also atomically */
4106 gen_op_check_align_T0_3();
4107 gen_movl_reg_T1(rd);
4108 ABI32_MASK(cpu_T[0]);
4109 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
4110 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4111 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
4112 break;
4113 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4114 case 0x10: /* load word alternate */
4115 #ifndef TARGET_SPARC64
4116 if (IS_IMM)
4117 goto illegal_insn;
4118 if (!supervisor(dc))
4119 goto priv_insn;
4120 #endif
4121 gen_op_check_align_T0_3();
4122 gen_ld_asi(insn, 4, 0);
4123 break;
4124 case 0x11: /* load unsigned byte alternate */
4125 #ifndef TARGET_SPARC64
4126 if (IS_IMM)
4127 goto illegal_insn;
4128 if (!supervisor(dc))
4129 goto priv_insn;
4130 #endif
4131 gen_ld_asi(insn, 1, 0);
4132 break;
4133 case 0x12: /* load unsigned halfword alternate */
4134 #ifndef TARGET_SPARC64
4135 if (IS_IMM)
4136 goto illegal_insn;
4137 if (!supervisor(dc))
4138 goto priv_insn;
4139 #endif
4140 gen_op_check_align_T0_1();
4141 gen_ld_asi(insn, 2, 0);
4142 break;
4143 case 0x13: /* load double word alternate */
4144 #ifndef TARGET_SPARC64
4145 if (IS_IMM)
4146 goto illegal_insn;
4147 if (!supervisor(dc))
4148 goto priv_insn;
4149 #endif
4150 if (rd & 1)
4151 goto illegal_insn;
4152 gen_op_check_align_T0_7();
4153 gen_ldda_asi(insn);
4154 gen_movl_T0_reg(rd + 1);
4155 break;
4156 case 0x19: /* load signed byte alternate */
4157 #ifndef TARGET_SPARC64
4158 if (IS_IMM)
4159 goto illegal_insn;
4160 if (!supervisor(dc))
4161 goto priv_insn;
4162 #endif
4163 gen_ld_asi(insn, 1, 1);
4164 break;
4165 case 0x1a: /* load signed halfword alternate */
4166 #ifndef TARGET_SPARC64
4167 if (IS_IMM)
4168 goto illegal_insn;
4169 if (!supervisor(dc))
4170 goto priv_insn;
4171 #endif
4172 gen_op_check_align_T0_1();
4173 gen_ld_asi(insn, 2, 1);
4174 break;
4175 case 0x1d: /* ldstuba -- XXX: should be atomically */
4176 #ifndef TARGET_SPARC64
4177 if (IS_IMM)
4178 goto illegal_insn;
4179 if (!supervisor(dc))
4180 goto priv_insn;
4181 #endif
4182 gen_ldstub_asi(insn);
4183 break;
4184 case 0x1f: /* swap reg with alt. memory. Also atomically */
4185 #ifndef TARGET_SPARC64
4186 if (IS_IMM)
4187 goto illegal_insn;
4188 if (!supervisor(dc))
4189 goto priv_insn;
4190 #endif
4191 gen_op_check_align_T0_3();
4192 gen_movl_reg_T1(rd);
4193 gen_swap_asi(insn);
4194 break;
4196 #ifndef TARGET_SPARC64
4197 case 0x30: /* ldc */
4198 case 0x31: /* ldcsr */
4199 case 0x33: /* lddc */
4200 goto ncp_insn;
4201 #endif
4202 #endif
4203 #ifdef TARGET_SPARC64
4204 case 0x08: /* V9 ldsw */
4205 gen_op_check_align_T0_3();
4206 ABI32_MASK(cpu_T[0]);
4207 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
4208 break;
4209 case 0x0b: /* V9 ldx */
4210 gen_op_check_align_T0_7();
4211 ABI32_MASK(cpu_T[0]);
4212 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
4213 break;
4214 case 0x18: /* V9 ldswa */
4215 gen_op_check_align_T0_3();
4216 gen_ld_asi(insn, 4, 1);
4217 break;
4218 case 0x1b: /* V9 ldxa */
4219 gen_op_check_align_T0_7();
4220 gen_ld_asi(insn, 8, 0);
4221 break;
4222 case 0x2d: /* V9 prefetch, no effect */
4223 goto skip_move;
4224 case 0x30: /* V9 ldfa */
4225 gen_op_check_align_T0_3();
4226 gen_ldf_asi(insn, 4, rd);
4227 goto skip_move;
4228 case 0x33: /* V9 lddfa */
4229 gen_op_check_align_T0_3();
4230 gen_ldf_asi(insn, 8, DFPREG(rd));
4231 goto skip_move;
4232 case 0x3d: /* V9 prefetcha, no effect */
4233 goto skip_move;
4234 case 0x32: /* V9 ldqfa */
4235 #if defined(CONFIG_USER_ONLY)
4236 gen_op_check_align_T0_3();
4237 gen_ldf_asi(insn, 16, QFPREG(rd));
4238 goto skip_move;
4239 #else
4240 goto nfpu_insn;
4241 #endif
4242 #endif
4243 default:
4244 goto illegal_insn;
4246 gen_movl_T1_reg(rd);
4247 #ifdef TARGET_SPARC64
4248 skip_move: ;
4249 #endif
4250 } else if (xop >= 0x20 && xop < 0x24) {
4251 if (gen_trap_ifnofpu(dc))
4252 goto jmp_insn;
4253 switch (xop) {
4254 case 0x20: /* load fpreg */
4255 gen_op_check_align_T0_3();
4256 gen_op_ldst(ldf);
4257 gen_op_store_FT0_fpr(rd);
4258 break;
4259 case 0x21: /* load fsr */
4260 gen_op_check_align_T0_3();
4261 gen_op_ldst(ldf);
4262 gen_op_ldfsr();
4263 tcg_gen_helper_0_0(helper_ldfsr);
4264 break;
4265 case 0x22: /* load quad fpreg */
4266 #if defined(CONFIG_USER_ONLY)
4267 gen_op_check_align_T0_7();
4268 gen_op_ldst(ldqf);
4269 gen_op_store_QT0_fpr(QFPREG(rd));
4270 break;
4271 #else
4272 goto nfpu_insn;
4273 #endif
4274 case 0x23: /* load double fpreg */
4275 gen_op_check_align_T0_7();
4276 gen_op_ldst(lddf);
4277 gen_op_store_DT0_fpr(DFPREG(rd));
4278 break;
4279 default:
4280 goto illegal_insn;
4282 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4283 xop == 0xe || xop == 0x1e) {
4284 gen_movl_reg_T1(rd);
4285 switch (xop) {
4286 case 0x4: /* store word */
4287 gen_op_check_align_T0_3();
4288 ABI32_MASK(cpu_T[0]);
4289 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4290 break;
4291 case 0x5: /* store byte */
4292 ABI32_MASK(cpu_T[0]);
4293 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
4294 break;
4295 case 0x6: /* store halfword */
4296 gen_op_check_align_T0_1();
4297 ABI32_MASK(cpu_T[0]);
4298 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
4299 break;
4300 case 0x7: /* store double word */
4301 if (rd & 1)
4302 goto illegal_insn;
4303 #ifndef __i386__
4304 else {
4305 TCGv r_dword, r_low;
4307 gen_op_check_align_T0_7();
4308 r_dword = tcg_temp_new(TCG_TYPE_I64);
4309 r_low = tcg_temp_new(TCG_TYPE_I32);
4310 gen_movl_reg_TN(rd + 1, r_low);
4311 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4312 r_low);
4313 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
4315 #else /* __i386__ */
4316 gen_op_check_align_T0_7();
4317 flush_T2(dc);
4318 gen_movl_reg_T2(rd + 1);
4319 gen_op_ldst(std);
4320 #endif /* __i386__ */
4321 break;
4322 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4323 case 0x14: /* store word alternate */
4324 #ifndef TARGET_SPARC64
4325 if (IS_IMM)
4326 goto illegal_insn;
4327 if (!supervisor(dc))
4328 goto priv_insn;
4329 #endif
4330 gen_op_check_align_T0_3();
4331 gen_st_asi(insn, 4);
4332 break;
4333 case 0x15: /* store byte alternate */
4334 #ifndef TARGET_SPARC64
4335 if (IS_IMM)
4336 goto illegal_insn;
4337 if (!supervisor(dc))
4338 goto priv_insn;
4339 #endif
4340 gen_st_asi(insn, 1);
4341 break;
4342 case 0x16: /* store halfword alternate */
4343 #ifndef TARGET_SPARC64
4344 if (IS_IMM)
4345 goto illegal_insn;
4346 if (!supervisor(dc))
4347 goto priv_insn;
4348 #endif
4349 gen_op_check_align_T0_1();
4350 gen_st_asi(insn, 2);
4351 break;
4352 case 0x17: /* store double word alternate */
4353 #ifndef TARGET_SPARC64
4354 if (IS_IMM)
4355 goto illegal_insn;
4356 if (!supervisor(dc))
4357 goto priv_insn;
4358 #endif
4359 if (rd & 1)
4360 goto illegal_insn;
4361 else {
4362 int asi;
4363 TCGv r_dword, r_temp, r_size;
4365 gen_op_check_align_T0_7();
4366 r_dword = tcg_temp_new(TCG_TYPE_I64);
4367 r_temp = tcg_temp_new(TCG_TYPE_I32);
4368 r_size = tcg_temp_new(TCG_TYPE_I32);
4369 gen_movl_reg_TN(rd + 1, r_temp);
4370 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4371 r_temp);
4372 #ifdef TARGET_SPARC64
4373 if (IS_IMM) {
4374 int offset;
4376 offset = GET_FIELD(insn, 25, 31);
4377 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
4378 tcg_gen_ld_i32(r_dword, cpu_env, offsetof(CPUSPARCState, asi));
4379 } else {
4380 #endif
4381 asi = GET_FIELD(insn, 19, 26);
4382 tcg_gen_movi_i32(r_temp, asi);
4383 #ifdef TARGET_SPARC64
4385 #endif
4386 tcg_gen_movi_i32(r_size, 8);
4387 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_temp, r_size);
4389 break;
4390 #endif
4391 #ifdef TARGET_SPARC64
4392 case 0x0e: /* V9 stx */
4393 gen_op_check_align_T0_7();
4394 ABI32_MASK(cpu_T[0]);
4395 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
4396 break;
4397 case 0x1e: /* V9 stxa */
4398 gen_op_check_align_T0_7();
4399 gen_st_asi(insn, 8);
4400 break;
4401 #endif
4402 default:
4403 goto illegal_insn;
4405 } else if (xop > 0x23 && xop < 0x28) {
4406 if (gen_trap_ifnofpu(dc))
4407 goto jmp_insn;
4408 switch (xop) {
4409 case 0x24:
4410 gen_op_check_align_T0_3();
4411 gen_op_load_fpr_FT0(rd);
4412 gen_op_ldst(stf);
4413 break;
4414 case 0x25: /* stfsr, V9 stxfsr */
4415 #ifdef CONFIG_USER_ONLY
4416 gen_op_check_align_T0_3();
4417 #endif
4418 gen_op_stfsr();
4419 gen_op_ldst(stf);
4420 break;
4421 case 0x26:
4422 #ifdef TARGET_SPARC64
4423 #if defined(CONFIG_USER_ONLY)
4424 /* V9 stqf, store quad fpreg */
4425 gen_op_check_align_T0_7();
4426 gen_op_load_fpr_QT0(QFPREG(rd));
4427 gen_op_ldst(stqf);
4428 break;
4429 #else
4430 goto nfpu_insn;
4431 #endif
4432 #else /* !TARGET_SPARC64 */
4433 /* stdfq, store floating point queue */
4434 #if defined(CONFIG_USER_ONLY)
4435 goto illegal_insn;
4436 #else
4437 if (!supervisor(dc))
4438 goto priv_insn;
4439 if (gen_trap_ifnofpu(dc))
4440 goto jmp_insn;
4441 goto nfq_insn;
4442 #endif
4443 #endif
4444 case 0x27:
4445 gen_op_check_align_T0_7();
4446 gen_op_load_fpr_DT0(DFPREG(rd));
4447 gen_op_ldst(stdf);
4448 break;
4449 default:
4450 goto illegal_insn;
4452 } else if (xop > 0x33 && xop < 0x3f) {
4453 switch (xop) {
4454 #ifdef TARGET_SPARC64
4455 case 0x34: /* V9 stfa */
4456 gen_op_check_align_T0_3();
4457 gen_op_load_fpr_FT0(rd);
4458 gen_stf_asi(insn, 4, rd);
4459 break;
4460 case 0x36: /* V9 stqfa */
4461 #if defined(CONFIG_USER_ONLY)
4462 gen_op_check_align_T0_7();
4463 gen_op_load_fpr_QT0(QFPREG(rd));
4464 gen_stf_asi(insn, 16, QFPREG(rd));
4465 break;
4466 #else
4467 goto nfpu_insn;
4468 #endif
4469 case 0x37: /* V9 stdfa */
4470 gen_op_check_align_T0_3();
4471 gen_op_load_fpr_DT0(DFPREG(rd));
4472 gen_stf_asi(insn, 8, DFPREG(rd));
4473 break;
4474 case 0x3c: /* V9 casa */
4475 gen_op_check_align_T0_3();
4476 gen_cas_asi(insn, rd);
4477 gen_movl_T1_reg(rd);
4478 break;
4479 case 0x3e: /* V9 casxa */
4480 gen_op_check_align_T0_7();
4481 gen_casx_asi(insn, rd);
4482 gen_movl_T1_reg(rd);
4483 break;
4484 #else
4485 case 0x34: /* stc */
4486 case 0x35: /* stcsr */
4487 case 0x36: /* stdcq */
4488 case 0x37: /* stdc */
4489 goto ncp_insn;
4490 #endif
4491 default:
4492 goto illegal_insn;
4495 else
4496 goto illegal_insn;
4498 break;
4500 /* default case for non jump instructions */
4501 if (dc->npc == DYNAMIC_PC) {
4502 dc->pc = DYNAMIC_PC;
4503 gen_op_next_insn();
4504 } else if (dc->npc == JUMP_PC) {
4505 /* we can do a static jump */
4506 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
4507 dc->is_br = 1;
4508 } else {
4509 dc->pc = dc->npc;
4510 dc->npc = dc->npc + 4;
4512 jmp_insn:
4513 return;
4514 illegal_insn:
4515 save_state(dc);
4516 gen_op_exception(TT_ILL_INSN);
4517 dc->is_br = 1;
4518 return;
4519 #if !defined(CONFIG_USER_ONLY)
4520 priv_insn:
4521 save_state(dc);
4522 gen_op_exception(TT_PRIV_INSN);
4523 dc->is_br = 1;
4524 return;
4525 nfpu_insn:
4526 save_state(dc);
4527 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4528 dc->is_br = 1;
4529 return;
4530 #ifndef TARGET_SPARC64
4531 nfq_insn:
4532 save_state(dc);
4533 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4534 dc->is_br = 1;
4535 return;
4536 #endif
4537 #endif
4538 #ifndef TARGET_SPARC64
4539 ncp_insn:
4540 save_state(dc);
4541 gen_op_exception(TT_NCP_INSN);
4542 dc->is_br = 1;
4543 return;
4544 #endif
4547 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
/* Translate a block of guest SPARC code starting at tb->pc into TCG ops.
 *
 * spc == 0: normal translation.
 * spc != 0: "search PC" mode — additionally record per-insn pc/npc in the
 *           gen_opc_pc/gen_opc_npc/gen_opc_instr_start arrays (and the
 *           delayed-branch targets in gen_opc_jump_pc) so a host PC can
 *           later be mapped back to a guest PC.
 *
 * Always returns 0. */
static inline int gen_intermediate_code_internal(TranslationBlock * tb,
                                                 int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;

    /* Initialise the per-block disassembly context from the TB. */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* npc of the first insn is carried in cs_base (delay-slot handling). */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = cpu_fpu_enabled(env);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    do {
        /* If a breakpoint is set on the current insn, emit a call into the
           debug helper and terminate the block instead of translating. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            /* Record pc/npc for this op index; pad skipped indices with 0. */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
            }
        }
        last_pc = dc->pc;
        disas_sparc_insn(dc);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            gen_jmp_im(dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));

 exit_gen_loop:
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_branch(dc, dc->pc, dc->npc);
        } else {
            /* dynamic PC/NPC: store them in env and exit the TB */
            if (dc->pc != DYNAMIC_PC)
                gen_jmp_im(dc->pc);
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Zero out trailing instr_start entries past the last insn. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    return 0;
}
/* Normal translation entry point (spc = 0): generate TCG ops for tb. */
int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    return gen_intermediate_code_internal(tb, 0, env);
}
/* "Search PC" entry point (spc = 1): re-translate tb while recording
   per-op guest pc/npc so a host PC can be mapped back to a guest PC. */
int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    return gen_intermediate_code_internal(tb, 1, env);
}
/* Reset the CPU to its architectural power-on state: flush the TLB,
   reset register-window state, and set the mode/PC/MMU fields that
   differ between user-mode emulation and system emulation, and between
   sparc32 and sparc64. */
void cpu_reset(CPUSPARCState *env)
{
    tlb_flush(env, 1);
    env->cwp = 0;
    env->wim = 1;
    /* regwptr tracks the current window inside the register file. */
    env->regwptr = env->regbase + (env->cwp * 16);
#if defined(CONFIG_USER_ONLY)
    env->user_mode_only = 1;
#ifdef TARGET_SPARC64
    env->cleanwin = NWINDOWS - 2;
    env->cansave = NWINDOWS - 2;
    /* User mode starts with FPU enabled and interrupts on. */
    env->pstate = PS_RMO | PS_PEF | PS_IE;
    env->asi = 0x82; // Primary no-fault
#endif
#else
    /* System emulation: come up in supervisor mode with traps disabled. */
    env->psret = 0;
    env->psrs = 1;
    env->psrps = 1;
#ifdef TARGET_SPARC64
    env->pstate = PS_PRIV;
    env->hpstate = HS_PRIV;
    env->pc = 0x1fff0000000ULL;
    env->tsptr = &env->ts[env->tl];
#else
    env->pc = 0;
    /* Disable MMU, clear no-fault, restore the boot-mode mapping bit. */
    env->mmuregs[0] &= ~(MMU_E | MMU_NF);
    env->mmuregs[0] |= env->mmu_bm;
#endif
    env->npc = env->pc + 4;
#endif
}
/* Allocate and initialise a CPUSPARCState for the named CPU model.
 *
 * Looks the model up in the sparc_defs table, allocates the state,
 * copies the model's IU/FPU/MMU version fields in, and — exactly once
 * per process — registers the TCG global variables (env, regwptr, the
 * T0-T2 temporaries, condition-code state and global registers).
 *
 * Returns NULL if the model is unknown or allocation fails; the caller
 * owns the returned state. */
CPUSPARCState *cpu_sparc_init(const char *cpu_model)
{
    CPUSPARCState *env;
    const sparc_def_t *def;
    static int inited;
    unsigned int i;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };

    def = cpu_sparc_find_by_name(cpu_model);
    if (!def)
        return NULL;

    env = qemu_mallocz(sizeof(CPUSPARCState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    env->cpu_model_str = cpu_model;
    env->version = def->iu_version;
    env->fsr = def->fpu_version;
#if !defined(TARGET_SPARC64)
    /* sparc32: copy the model's MMU description and default the CPU id. */
    env->mmu_bm = def->mmu_bm;
    env->mmu_ctpr_mask = def->mmu_ctpr_mask;
    env->mmu_cxr_mask = def->mmu_cxr_mask;
    env->mmu_sfsr_mask = def->mmu_sfsr_mask;
    env->mmu_trcr_mask = def->mmu_trcr_mask;
    env->mmuregs[0] |= def->mmu_version;
    cpu_sparc_set_id(env, 0);
#endif

    /* init various static tables */
    if (!inited) {
        inited = 1;

        tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
        //#if TARGET_LONG_BITS > HOST_LONG_BITS
#ifdef TARGET_SPARC64
        /* 64-bit target: T0-T2 live in memory (env slots), plus xcc. */
        cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t0), "T0");
        cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t1), "T1");
        cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t2), "T2");
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#else
        /* 32-bit target: T0-T2 can live in fixed host registers. */
        cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
        cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
        cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
#endif
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        /* g0 is hardwired to zero, so only g1-g7 get TCG globals. */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
    }

    cpu_reset(env);

    return env;
}
/* Set the CPU id (sparc32 only): store it in the MXCC module-id
   register, biased by 8 and masked to 4 bits, in bits 31:24.
   No-op on sparc64 builds. */
void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
{
#if !defined(TARGET_SPARC64)
    env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
#endif
}
4791 static const sparc_def_t sparc_defs[] = {
4792 #ifdef TARGET_SPARC64
4794 .name = "Fujitsu Sparc64",
4795 .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
4796 | (MAXTL << 8) | (NWINDOWS - 1)),
4797 .fpu_version = 0x00000000,
4798 .mmu_version = 0,
4801 .name = "Fujitsu Sparc64 III",
4802 .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
4803 | (MAXTL << 8) | (NWINDOWS - 1)),
4804 .fpu_version = 0x00000000,
4805 .mmu_version = 0,
4808 .name = "Fujitsu Sparc64 IV",
4809 .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
4810 | (MAXTL << 8) | (NWINDOWS - 1)),
4811 .fpu_version = 0x00000000,
4812 .mmu_version = 0,
4815 .name = "Fujitsu Sparc64 V",
4816 .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
4817 | (MAXTL << 8) | (NWINDOWS - 1)),
4818 .fpu_version = 0x00000000,
4819 .mmu_version = 0,
4822 .name = "TI UltraSparc I",
4823 .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4824 | (MAXTL << 8) | (NWINDOWS - 1)),
4825 .fpu_version = 0x00000000,
4826 .mmu_version = 0,
4829 .name = "TI UltraSparc II",
4830 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
4831 | (MAXTL << 8) | (NWINDOWS - 1)),
4832 .fpu_version = 0x00000000,
4833 .mmu_version = 0,
4836 .name = "TI UltraSparc IIi",
4837 .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
4838 | (MAXTL << 8) | (NWINDOWS - 1)),
4839 .fpu_version = 0x00000000,
4840 .mmu_version = 0,
4843 .name = "TI UltraSparc IIe",
4844 .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
4845 | (MAXTL << 8) | (NWINDOWS - 1)),
4846 .fpu_version = 0x00000000,
4847 .mmu_version = 0,
4850 .name = "Sun UltraSparc III",
4851 .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
4852 | (MAXTL << 8) | (NWINDOWS - 1)),
4853 .fpu_version = 0x00000000,
4854 .mmu_version = 0,
4857 .name = "Sun UltraSparc III Cu",
4858 .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
4859 | (MAXTL << 8) | (NWINDOWS - 1)),
4860 .fpu_version = 0x00000000,
4861 .mmu_version = 0,
4864 .name = "Sun UltraSparc IIIi",
4865 .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
4866 | (MAXTL << 8) | (NWINDOWS - 1)),
4867 .fpu_version = 0x00000000,
4868 .mmu_version = 0,
4871 .name = "Sun UltraSparc IV",
4872 .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
4873 | (MAXTL << 8) | (NWINDOWS - 1)),
4874 .fpu_version = 0x00000000,
4875 .mmu_version = 0,
4878 .name = "Sun UltraSparc IV+",
4879 .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
4880 | (MAXTL << 8) | (NWINDOWS - 1)),
4881 .fpu_version = 0x00000000,
4882 .mmu_version = 0,
4885 .name = "Sun UltraSparc IIIi+",
4886 .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
4887 | (MAXTL << 8) | (NWINDOWS - 1)),
4888 .fpu_version = 0x00000000,
4889 .mmu_version = 0,
4892 .name = "NEC UltraSparc I",
4893 .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4894 | (MAXTL << 8) | (NWINDOWS - 1)),
4895 .fpu_version = 0x00000000,
4896 .mmu_version = 0,
4898 #else
4900 .name = "Fujitsu MB86900",
4901 .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
4902 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4903 .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
4904 .mmu_bm = 0x00004000,
4905 .mmu_ctpr_mask = 0x007ffff0,
4906 .mmu_cxr_mask = 0x0000003f,
4907 .mmu_sfsr_mask = 0xffffffff,
4908 .mmu_trcr_mask = 0xffffffff,
4911 .name = "Fujitsu MB86904",
4912 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
4913 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4914 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
4915 .mmu_bm = 0x00004000,
4916 .mmu_ctpr_mask = 0x00ffffc0,
4917 .mmu_cxr_mask = 0x000000ff,
4918 .mmu_sfsr_mask = 0x00016fff,
4919 .mmu_trcr_mask = 0x00ffffff,
4922 .name = "Fujitsu MB86907",
4923 .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
4924 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4925 .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
4926 .mmu_bm = 0x00004000,
4927 .mmu_ctpr_mask = 0xffffffc0,
4928 .mmu_cxr_mask = 0x000000ff,
4929 .mmu_sfsr_mask = 0x00016fff,
4930 .mmu_trcr_mask = 0xffffffff,
4933 .name = "LSI L64811",
4934 .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
4935 .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
4936 .mmu_version = 0x10 << 24,
4937 .mmu_bm = 0x00004000,
4938 .mmu_ctpr_mask = 0x007ffff0,
4939 .mmu_cxr_mask = 0x0000003f,
4940 .mmu_sfsr_mask = 0xffffffff,
4941 .mmu_trcr_mask = 0xffffffff,
4944 .name = "Cypress CY7C601",
4945 .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
4946 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4947 .mmu_version = 0x10 << 24,
4948 .mmu_bm = 0x00004000,
4949 .mmu_ctpr_mask = 0x007ffff0,
4950 .mmu_cxr_mask = 0x0000003f,
4951 .mmu_sfsr_mask = 0xffffffff,
4952 .mmu_trcr_mask = 0xffffffff,
4955 .name = "Cypress CY7C611",
4956 .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
4957 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4958 .mmu_version = 0x10 << 24,
4959 .mmu_bm = 0x00004000,
4960 .mmu_ctpr_mask = 0x007ffff0,
4961 .mmu_cxr_mask = 0x0000003f,
4962 .mmu_sfsr_mask = 0xffffffff,
4963 .mmu_trcr_mask = 0xffffffff,
4966 .name = "TI SuperSparc II",
4967 .iu_version = 0x40000000,
4968 .fpu_version = 0 << 17,
4969 .mmu_version = 0x04000000,
4970 .mmu_bm = 0x00002000,
4971 .mmu_ctpr_mask = 0xffffffc0,
4972 .mmu_cxr_mask = 0x0000ffff,
4973 .mmu_sfsr_mask = 0xffffffff,
4974 .mmu_trcr_mask = 0xffffffff,
4977 .name = "TI MicroSparc I",
4978 .iu_version = 0x41000000,
4979 .fpu_version = 4 << 17,
4980 .mmu_version = 0x41000000,
4981 .mmu_bm = 0x00004000,
4982 .mmu_ctpr_mask = 0x007ffff0,
4983 .mmu_cxr_mask = 0x0000003f,
4984 .mmu_sfsr_mask = 0x00016fff,
4985 .mmu_trcr_mask = 0x0000003f,
4988 .name = "TI MicroSparc II",
4989 .iu_version = 0x42000000,
4990 .fpu_version = 4 << 17,
4991 .mmu_version = 0x02000000,
4992 .mmu_bm = 0x00004000,
4993 .mmu_ctpr_mask = 0x00ffffc0,
4994 .mmu_cxr_mask = 0x000000ff,
4995 .mmu_sfsr_mask = 0x00016fff,
4996 .mmu_trcr_mask = 0x00ffffff,
4999 .name = "TI MicroSparc IIep",
5000 .iu_version = 0x42000000,
5001 .fpu_version = 4 << 17,
5002 .mmu_version = 0x04000000,
5003 .mmu_bm = 0x00004000,
5004 .mmu_ctpr_mask = 0x00ffffc0,
5005 .mmu_cxr_mask = 0x000000ff,
5006 .mmu_sfsr_mask = 0x00016bff,
5007 .mmu_trcr_mask = 0x00ffffff,
5010 .name = "TI SuperSparc 51",
5011 .iu_version = 0x43000000,
5012 .fpu_version = 0 << 17,
5013 .mmu_version = 0x04000000,
5014 .mmu_bm = 0x00002000,
5015 .mmu_ctpr_mask = 0xffffffc0,
5016 .mmu_cxr_mask = 0x0000ffff,
5017 .mmu_sfsr_mask = 0xffffffff,
5018 .mmu_trcr_mask = 0xffffffff,
5021 .name = "TI SuperSparc 61",
5022 .iu_version = 0x44000000,
5023 .fpu_version = 0 << 17,
5024 .mmu_version = 0x04000000,
5025 .mmu_bm = 0x00002000,
5026 .mmu_ctpr_mask = 0xffffffc0,
5027 .mmu_cxr_mask = 0x0000ffff,
5028 .mmu_sfsr_mask = 0xffffffff,
5029 .mmu_trcr_mask = 0xffffffff,
5032 .name = "Ross RT625",
5033 .iu_version = 0x1e000000,
5034 .fpu_version = 1 << 17,
5035 .mmu_version = 0x1e000000,
5036 .mmu_bm = 0x00004000,
5037 .mmu_ctpr_mask = 0x007ffff0,
5038 .mmu_cxr_mask = 0x0000003f,
5039 .mmu_sfsr_mask = 0xffffffff,
5040 .mmu_trcr_mask = 0xffffffff,
5043 .name = "Ross RT620",
5044 .iu_version = 0x1f000000,
5045 .fpu_version = 1 << 17,
5046 .mmu_version = 0x1f000000,
5047 .mmu_bm = 0x00004000,
5048 .mmu_ctpr_mask = 0x007ffff0,
5049 .mmu_cxr_mask = 0x0000003f,
5050 .mmu_sfsr_mask = 0xffffffff,
5051 .mmu_trcr_mask = 0xffffffff,
5054 .name = "BIT B5010",
5055 .iu_version = 0x20000000,
5056 .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
5057 .mmu_version = 0x20000000,
5058 .mmu_bm = 0x00004000,
5059 .mmu_ctpr_mask = 0x007ffff0,
5060 .mmu_cxr_mask = 0x0000003f,
5061 .mmu_sfsr_mask = 0xffffffff,
5062 .mmu_trcr_mask = 0xffffffff,
5065 .name = "Matsushita MN10501",
5066 .iu_version = 0x50000000,
5067 .fpu_version = 0 << 17,
5068 .mmu_version = 0x50000000,
5069 .mmu_bm = 0x00004000,
5070 .mmu_ctpr_mask = 0x007ffff0,
5071 .mmu_cxr_mask = 0x0000003f,
5072 .mmu_sfsr_mask = 0xffffffff,
5073 .mmu_trcr_mask = 0xffffffff,
5076 .name = "Weitek W8601",
5077 .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
5078 .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
5079 .mmu_version = 0x10 << 24,
5080 .mmu_bm = 0x00004000,
5081 .mmu_ctpr_mask = 0x007ffff0,
5082 .mmu_cxr_mask = 0x0000003f,
5083 .mmu_sfsr_mask = 0xffffffff,
5084 .mmu_trcr_mask = 0xffffffff,
5087 .name = "LEON2",
5088 .iu_version = 0xf2000000,
5089 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
5090 .mmu_version = 0xf2000000,
5091 .mmu_bm = 0x00004000,
5092 .mmu_ctpr_mask = 0x007ffff0,
5093 .mmu_cxr_mask = 0x0000003f,
5094 .mmu_sfsr_mask = 0xffffffff,
5095 .mmu_trcr_mask = 0xffffffff,
5098 .name = "LEON3",
5099 .iu_version = 0xf3000000,
5100 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
5101 .mmu_version = 0xf3000000,
5102 .mmu_bm = 0x00004000,
5103 .mmu_ctpr_mask = 0x007ffff0,
5104 .mmu_cxr_mask = 0x0000003f,
5105 .mmu_sfsr_mask = 0xffffffff,
5106 .mmu_trcr_mask = 0xffffffff,
5108 #endif
5111 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
5113 unsigned int i;
5115 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5116 if (strcasecmp(name, sparc_defs[i].name) == 0) {
5117 return &sparc_defs[i];
5120 return NULL;
5123 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
5125 unsigned int i;
5127 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5128 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
5129 sparc_defs[i].name,
5130 sparc_defs[i].iu_version,
5131 sparc_defs[i].fpu_version,
5132 sparc_defs[i].mmu_version);
5136 #define GET_FLAG(a,b) ((env->psr & a)?b:'-')
5138 void cpu_dump_state(CPUState *env, FILE *f,
5139 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
5140 int flags)
5142 int i, x;
5144 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
5145 cpu_fprintf(f, "General Registers:\n");
5146 for (i = 0; i < 4; i++)
5147 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
5148 cpu_fprintf(f, "\n");
5149 for (; i < 8; i++)
5150 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
5151 cpu_fprintf(f, "\nCurrent Register Window:\n");
5152 for (x = 0; x < 3; x++) {
5153 for (i = 0; i < 4; i++)
5154 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
5155 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
5156 env->regwptr[i + x * 8]);
5157 cpu_fprintf(f, "\n");
5158 for (; i < 8; i++)
5159 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
5160 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
5161 env->regwptr[i + x * 8]);
5162 cpu_fprintf(f, "\n");
5164 cpu_fprintf(f, "\nFloating Point Registers:\n");
5165 for (i = 0; i < 32; i++) {
5166 if ((i & 3) == 0)
5167 cpu_fprintf(f, "%%f%02d:", i);
5168 cpu_fprintf(f, " %016lf", env->fpr[i]);
5169 if ((i & 3) == 3)
5170 cpu_fprintf(f, "\n");
5172 #ifdef TARGET_SPARC64
5173 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
5174 env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
5175 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
5176 env->cansave, env->canrestore, env->otherwin, env->wstate,
5177 env->cleanwin, NWINDOWS - 1 - env->cwp);
5178 #else
5179 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
5180 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
5181 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
5182 env->psrs?'S':'-', env->psrps?'P':'-',
5183 env->psret?'E':'-', env->wim);
5184 #endif
5185 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
5188 #if defined(CONFIG_USER_ONLY)
5189 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
5191 return addr;
5194 #else
5195 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
5196 int *access_index, target_ulong address, int rw,
5197 int mmu_idx);
5199 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
5201 target_phys_addr_t phys_addr;
5202 int prot, access_index;
5204 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
5205 MMU_KERNEL_IDX) != 0)
5206 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
5207 0, MMU_KERNEL_IDX) != 0)
5208 return -1;
5209 if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
5210 return -1;
5211 return phys_addr;
5213 #endif
/* Handler for the SPARC FLUSH instruction: the guest has modified code at
   'addr', so invalidate any translated blocks covering the aligned
   doubleword there. */
void helper_flush(target_ulong addr)
{
    addr &= ~7;  /* align down to an 8-byte boundary */
    tb_invalidate_page_range(addr, addr + 8);
}