Convert condition code changing versions of add, sub, logic, and div to TCG
[qemu/malc.git] / target-sparc / translate.c
blob3225b8a1523a0bf0c381f395264c56fa9657de42
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 TODO-list:
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
30 #include <stdarg.h>
31 #include <stdlib.h>
32 #include <stdio.h>
33 #include <string.h>
34 #include <inttypes.h>
36 #include "cpu.h"
37 #include "exec-all.h"
38 #include "disas.h"
39 #include "helper.h"
40 #include "tcg-op.h"
42 #define DEBUG_DISAS
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 /* global register indexes */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_dst, cpu_psr;
50 #ifdef TARGET_SPARC64
51 static TCGv cpu_xcc;
52 #endif
53 /* local register indexes (only used inside old micro ops) */
54 static TCGv cpu_tmp0;
56 typedef struct DisasContext {
57 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
58 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
59 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
60 int is_br;
61 int mem_idx;
62 int fpu_enabled;
63 struct TranslationBlock *tb;
64 } DisasContext;
66 typedef struct sparc_def_t sparc_def_t;
68 struct sparc_def_t {
69 const unsigned char *name;
70 target_ulong iu_version;
71 uint32_t fpu_version;
72 uint32_t mmu_version;
73 uint32_t mmu_bm;
74 uint32_t mmu_ctpr_mask;
75 uint32_t mmu_cxr_mask;
76 uint32_t mmu_sfsr_mask;
77 uint32_t mmu_trcr_mask;
80 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
82 extern FILE *logfile;
83 extern int loglevel;
85 // This function uses non-native bit order
86 #define GET_FIELD(X, FROM, TO) \
87 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
89 // This function uses the order in the manuals, i.e. bit 0 is 2^0
90 #define GET_FIELD_SP(X, FROM, TO) \
91 GET_FIELD(X, 31 - (TO), 31 - (FROM))
93 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
94 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
96 #ifdef TARGET_SPARC64
97 #define FFPREG(r) (r)
98 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
99 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
100 #else
101 #define FFPREG(r) (r)
102 #define DFPREG(r) (r & 0x1e)
103 #define QFPREG(r) (r & 0x1c)
104 #endif
/*
 * Sign-extend the low 'len' bits of 'x' (1 <= len <= 32).
 *
 * The previous implementation computed (x << len) >> len on a signed
 * int: left-shifting bits into or past the sign bit is undefined
 * behavior, and right-shifting a negative value is implementation-
 * defined.  Do the masking in uint32_t and use the (v ^ sign) - sign
 * idiom instead; the final conversion to int assumes two's-complement
 * representation, as QEMU does throughout.
 */
static int sign_extend(int x, int len)
{
    uint32_t sign = 1u << (len - 1);                 /* MSB of the field */
    uint32_t v = (uint32_t)x & (sign | (sign - 1));  /* keep low 'len' bits */

    return (int)((v ^ sign) - sign);
}
112 #define IS_IMM (insn & (1<<13))
114 static void disas_sparc_insn(DisasContext * dc);
116 #ifdef TARGET_SPARC64
117 #define GEN32(func, NAME) \
118 static GenOpFunc * const NAME ## _table [64] = { \
119 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
120 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
121 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
122 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
123 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
124 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
125 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
126 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
127 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
128 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
129 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
130 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
131 }; \
132 static inline void func(int n) \
134 NAME ## _table[n](); \
136 #else
137 #define GEN32(func, NAME) \
138 static GenOpFunc *const NAME ## _table [32] = { \
139 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
140 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
141 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
142 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
143 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
144 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
145 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
146 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
147 }; \
148 static inline void func(int n) \
150 NAME ## _table[n](); \
152 #endif
154 /* floating point registers moves */
155 GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
156 GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
157 GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
158 GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
160 GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
161 GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
162 GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
163 GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
165 #if defined(CONFIG_USER_ONLY)
166 GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
167 GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
168 GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
169 GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
170 #endif
172 /* moves */
173 #ifdef CONFIG_USER_ONLY
174 #define supervisor(dc) 0
175 #ifdef TARGET_SPARC64
176 #define hypervisor(dc) 0
177 #endif
178 #define gen_op_ldst(name) gen_op_##name##_raw()
179 #else
180 #define supervisor(dc) (dc->mem_idx >= 1)
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) (dc->mem_idx == 2)
183 #define OP_LD_TABLE(width) \
184 static GenOpFunc * const gen_op_##width[] = { \
185 &gen_op_##width##_user, \
186 &gen_op_##width##_kernel, \
187 &gen_op_##width##_hypv, \
189 #else
190 #define OP_LD_TABLE(width) \
191 static GenOpFunc * const gen_op_##width[] = { \
192 &gen_op_##width##_user, \
193 &gen_op_##width##_kernel, \
195 #endif
196 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
197 #endif
199 #ifndef CONFIG_USER_ONLY
200 #ifdef __i386__
201 OP_LD_TABLE(std);
202 #endif /* __i386__ */
203 OP_LD_TABLE(stf);
204 OP_LD_TABLE(stdf);
205 OP_LD_TABLE(ldf);
206 OP_LD_TABLE(lddf);
207 #endif
209 #ifdef TARGET_ABI32
210 #define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
211 #else
212 #define ABI32_MASK(addr)
213 #endif
/* Load the sign-extended immediate 'val' into T1. */
static inline void gen_movl_simm_T1(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
/* Copy SPARC register 'reg' into TCG value 'tn'.
   %g0 always reads as zero; %g1-%g7 live in env->gregs[]; the windowed
   registers (reg >= 8) are reached through the current-window pointer. */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_ld_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
/* Convenience wrappers: load SPARC register 'reg' into T0/T1/T2. */
static inline void gen_movl_reg_T0(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[0]);
}

static inline void gen_movl_reg_T1(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[1]);
}

#ifdef __i386__
/* T2 variant only needed by the i386 host-specific std path. */
static inline void gen_movl_reg_T2(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[2]);
}

#endif /* __i386__ */
/* Store TCG value 'tn' into SPARC register 'reg'.
   Writes to %g0 are discarded; otherwise mirror gen_movl_reg_TN. */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_st_tl(tn, cpu_env, offsetof(CPUState, gregs[reg]));
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
/* Convenience wrappers: store T0/T1 into SPARC register 'reg'. */
static inline void gen_movl_T0_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[0]);
}

static inline void gen_movl_T1_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[1]);
}
/* Move 32-bit (i32) and target-long (tl) values between T0 and a field
   of the CPU state at byte offset 'offset'. */
static inline void gen_op_movl_T0_env(size_t offset)
{
    tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
}

static inline void gen_op_movl_env_T0(size_t offset)
{
    tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
}

static inline void gen_op_movtl_T0_env(size_t offset)
{
    tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
}

static inline void gen_op_movtl_env_T0(size_t offset)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
}
/* Flagless ALU helpers: T0 = T0 op T1. */
static inline void gen_op_add_T1_T0(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_or_T1_T0(void)
{
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_xor_T1_T0(void)
{
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
/* Store the immediate 'pc' into env->pc. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
}

/* Store the immediate 'npc' into env->npc. */
static inline void gen_movl_npc_im(target_ulong npc)
{
    tcg_gen_movi_tl(cpu_tmp0, npc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
}
/* Emit an exit to (pc, npc), chaining directly to the next TB when both
   targets lie in the same guest page as this TB's start address. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(pc);
        gen_movl_npc_im(npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(pc);
        gen_movl_npc_im(npc);
        tcg_gen_exit_tb(0);
    }
}
// XXX suboptimal
/* Extract a single PSR condition-code bit from 'src' into 'reg' (0 or 1).
   Bit positions in the PSR icc field: N=23, Z=22, V=21, C=20. */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 23);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 22);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 21);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 20);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Emit a call to the raise_exception helper with trap type 'exception'. */
static inline void gen_op_exception(int exception)
{
    TCGv r_except;

    r_except = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_except, exception);
    tcg_gen_helper_0_1(raise_exception, r_except);
}
/* Clear all condition codes (icc, and xcc on sparc64) before they are
   recomputed by one of the gen_cc_* helpers below. */
static inline void gen_cc_clear(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
    tcg_gen_movi_i32(cpu_xcc, 0);
#endif
}
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
/* Set the N and Z flags from result 'dst': Z if zero, N if negative.
   The icc flags use 32-bit comparisons; on sparc64 the xcc flags repeat
   the tests at full target width. */
static inline void gen_cc_NZ(TCGv dst)
{
    int l1, l2;
    TCGv r_zero;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);
    tcg_gen_brcond_i32(TCG_COND_NE, dst, r_zero, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcond_i32(TCG_COND_GE, dst, r_zero, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
#ifdef TARGET_SPARC64
    {
        int l3, l4;

        l3 = gen_new_label();
        l4 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_NE, dst, r_zero, l3);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
        gen_set_label(l3);
        tcg_gen_brcond_tl(TCG_COND_GE, dst, r_zero, l4);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
        gen_set_label(l4);
    }
#endif
}
/* old op:
   if (T0 < src1)
       env->psr |= PSR_CARRY;
*/
/* Set the C flag after an addition: carry out iff the (unsigned) result
   'dst' wrapped below the original operand 'src1'. */
static inline void gen_cc_C_add(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_i32(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
#ifdef TARGET_SPARC64
    {
        int l2;

        l2 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l2);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
        gen_set_label(l2);
    }
#endif
}
440 /* old op:
441 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
442 env->psr |= PSR_OVF;
444 static inline void gen_cc_V_add(TCGv dst, TCGv src1, TCGv src2)
446 TCGv r_temp, r_temp2, r_temp3, r_zero;
447 int l1;
449 l1 = gen_new_label();
451 r_temp = tcg_temp_new(TCG_TYPE_TL);
452 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
453 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
454 r_zero = tcg_temp_new(TCG_TYPE_TL);
455 tcg_gen_movi_tl(r_zero, 0);
456 tcg_gen_xor_tl(r_temp, src1, src2);
457 tcg_gen_xori_tl(r_temp, r_temp, -1);
458 tcg_gen_xor_tl(r_temp2, src1, dst);
459 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
460 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
461 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
462 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
463 gen_set_label(l1);
464 #ifdef TARGET_SPARC64
466 int l2;
468 l2 = gen_new_label();
469 tcg_gen_movi_tl(r_zero, 0);
470 tcg_gen_xor_tl(r_temp, src1, src2);
471 tcg_gen_xori_tl(r_temp, r_temp, -1);
472 tcg_gen_xor_tl(r_temp2, src1, dst);
473 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
474 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
475 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
476 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
477 gen_set_label(l2);
479 #endif
482 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
484 TCGv r_temp, r_temp2, r_temp3, r_zero;
485 int l1;
487 l1 = gen_new_label();
489 r_temp = tcg_temp_new(TCG_TYPE_TL);
490 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
491 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
492 r_zero = tcg_temp_new(TCG_TYPE_TL);
493 tcg_gen_movi_tl(r_zero, 0);
494 tcg_gen_xor_tl(r_temp, src1, src2);
495 tcg_gen_xori_tl(r_temp, r_temp, -1);
496 tcg_gen_xor_tl(r_temp2, src1, dst);
497 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
498 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
499 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
500 gen_op_exception(TT_TOVF);
501 gen_set_label(l1);
502 #ifdef TARGET_SPARC64
504 int l2;
506 l2 = gen_new_label();
507 tcg_gen_movi_tl(r_zero, 0);
508 tcg_gen_xor_tl(r_temp, src1, src2);
509 tcg_gen_xori_tl(r_temp, r_temp, -1);
510 tcg_gen_xor_tl(r_temp2, src1, dst);
511 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
512 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
513 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
514 gen_op_exception(TT_TOVF);
515 gen_set_label(l2);
517 #endif
/* Tagged arithmetic: set V when either operand has its low two tag bits
   nonzero. */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_zero, r_temp;

    l1 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);
    tcg_gen_or_tl(r_temp, src1, src2);
    tcg_gen_andi_tl(r_temp, r_temp, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, r_zero, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
/* Tagged arithmetic, trapping variant: raise TT_TOVF when either operand
   has its low two tag bits nonzero. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_zero, r_temp;

    l1 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);
    tcg_gen_or_tl(r_temp, src1, src2);
    tcg_gen_andi_tl(r_temp, r_temp, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, r_zero, l1);
    gen_op_exception(TT_TOVF);
    gen_set_label(l1);
}
/* ADDcc: T0 = T0 + T1, updating N/Z/C/V.  The original T0 is preserved
   in cpu_cc_src because the flag helpers need the pre-add operand. */
static inline void gen_op_add_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
}
/* ADDXcc: T0 = T0 + T1 + C, updating flags.  The carry-in is read from
   cpu_psr before the flags are cleared; C is checked twice (after adding
   the carry-in and again after adding T1) so a wrap in either step sets
   PSR_CARRY. */
static inline void gen_op_addx_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    gen_cc_clear();
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
}
/* TADDcc: tagged add; like ADDcc but V is additionally set when either
   operand carries nonzero tag bits. */
static inline void gen_op_tadd_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
}
/* TADDccTV: tagged add that traps (TT_TOVF) on tag violation or signed
   overflow instead of setting V; N/Z/C are still updated. */
static inline void gen_op_tadd_T1_T0_ccTV(void)
{
    gen_tag_tv(cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_add_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
}
/* old op:
   if (src1 < T1)
       env->psr |= PSR_CARRY;
*/
/* Set the C flag after a subtraction: borrow iff src1 < src2 unsigned. */
static inline void gen_cc_C_sub(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_i32(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
#ifdef TARGET_SPARC64
    {
        int l2;

        l2 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l2);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
        gen_set_label(l2);
    }
#endif
}
621 /* old op:
622 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
623 env->psr |= PSR_OVF;
625 static inline void gen_cc_V_sub(TCGv dst, TCGv src1, TCGv src2)
627 TCGv r_temp, r_temp2, r_temp3, r_zero;
628 int l1;
630 l1 = gen_new_label();
632 r_temp = tcg_temp_new(TCG_TYPE_TL);
633 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
634 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
635 r_zero = tcg_temp_new(TCG_TYPE_TL);
636 tcg_gen_movi_tl(r_zero, 0);
637 tcg_gen_xor_tl(r_temp, src1, src2);
638 tcg_gen_xor_tl(r_temp2, src1, dst);
639 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
640 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
641 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
642 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
643 gen_set_label(l1);
644 #ifdef TARGET_SPARC64
646 int l2;
648 l2 = gen_new_label();
649 tcg_gen_movi_tl(r_zero, 0);
650 tcg_gen_xor_tl(r_temp, src1, src2);
651 tcg_gen_xor_tl(r_temp2, src1, dst);
652 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
653 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
654 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
655 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
656 gen_set_label(l2);
658 #endif
661 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
663 TCGv r_temp, r_temp2, r_temp3, r_zero;
664 int l1;
666 l1 = gen_new_label();
668 r_temp = tcg_temp_new(TCG_TYPE_TL);
669 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
670 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
671 r_zero = tcg_temp_new(TCG_TYPE_TL);
672 tcg_gen_movi_tl(r_zero, 0);
673 tcg_gen_xor_tl(r_temp, src1, src2);
674 tcg_gen_xor_tl(r_temp2, src1, dst);
675 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
676 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
677 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
678 gen_op_exception(TT_TOVF);
679 gen_set_label(l1);
680 #ifdef TARGET_SPARC64
682 int l2;
684 l2 = gen_new_label();
685 tcg_gen_movi_tl(r_zero, 0);
686 tcg_gen_xor_tl(r_temp, src1, src2);
687 tcg_gen_xor_tl(r_temp2, src1, dst);
688 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
689 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
690 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
691 gen_op_exception(TT_TOVF);
692 gen_set_label(l2);
694 #endif
/* SUBcc: T0 = T0 - T1, updating N/Z/C/V.  The original T0 is preserved
   in cpu_cc_src for the borrow/overflow tests. */
static inline void gen_op_sub_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
}
/* SUBXcc: T0 = T0 - T1 - C, updating flags.  The carry-in is read from
   cpu_psr before the flags are cleared; C is checked after subtracting
   the carry-in and again after subtracting T1 so a borrow in either
   step sets PSR_CARRY.
   NOTE(review): gen_cc_C_sub is called here with (result, old T0) —
   the argument order differs from gen_op_sub_T1_T0_cc; confirm intended. */
static inline void gen_op_subx_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    gen_cc_clear();
    gen_cc_C_sub(cpu_T[0], cpu_cc_src);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_C_sub(cpu_T[0], cpu_cc_src);
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
}
/* TSUBcc: tagged subtract; like SUBcc but V is additionally set when
   either operand carries nonzero tag bits. */
static inline void gen_op_tsub_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
}
/* TSUBccTV: tagged subtract that traps (TT_TOVF) on tag violation or
   signed overflow instead of setting V; N/Z/C are still updated. */
static inline void gen_op_tsub_T1_T0_ccTV(void)
{
    gen_tag_tv(cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_sub_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
}
/* Set condition codes after a division: N/Z from the quotient in T0,
   and V when cpu_T[1] is nonzero (presumably an overflow indicator left
   by the division helper — TODO confirm against the div helpers). */
static inline void gen_op_div_cc(void)
{
    int l1;
    TCGv r_zero;

    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    l1 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);
    tcg_gen_brcond_i32(TCG_COND_EQ, cpu_T[1], r_zero, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
/* Logic-op flags: clear all condition codes, then set N/Z from T0
   (logical instructions never set C or V). */
static inline void gen_op_logic_T0_cc(void)
{
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
}
/* Integer condition evaluators: compute a 0/1 truth value in 'dst' from
   the condition-code register 'src' (cpu_psr or cpu_xcc).  The comment
   above each function gives the Bicc condition in terms of N/Z/V/C. */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    TCGv r_flag;

    r_flag = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_N(r_flag, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, r_flag);
    gen_mov_reg_Z(r_flag, src);
    tcg_gen_or_tl(dst, dst, r_flag);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    TCGv r_V;

    r_V = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_V(r_V, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, r_V);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    TCGv r_Z;

    r_Z = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_Z(r_Z, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, r_Z);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    TCGv r_flag;

    r_flag = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_N(r_flag, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, r_flag);
    gen_mov_reg_Z(r_flag, src);
    tcg_gen_or_tl(dst, dst, r_flag);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    TCGv r_V;

    r_V = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_V(r_V, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, r_V);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    TCGv r_Z;

    r_Z = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_Z(r_Z, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, r_Z);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0/FCC1 (base position bits 10/11 of the FSR, shifted by
   'fcc_offset' to select one of the fcc fields) into 'reg' as 0 or 1. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_i32(reg, src, 10 + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_i32(reg, src, 11 + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Floating-point condition evaluators: compute a 0/1 truth value in
   'dst' from the FSR value 'src', using the fcc field selected by
   'fcc_offset'.  The comment above each function states which FCC
   encodings (0 ==, 1 <, 2 >, 3 unordered) satisfy the condition. */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, r_fcc1);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, r_fcc1);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xori_tl(r_fcc1, r_fcc1, 0x1);
    tcg_gen_and_tl(dst, dst, r_fcc1);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xori_tl(r_fcc1, r_fcc1, 0x1);
    tcg_gen_and_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/* Conditional branch with known targets: go to pc1 when r_cond is
   nonzero, otherwise fall through to pc2. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGv r_zero;
    int l1;

    l1 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
/* Conditional branch with annul bit set: when r_cond is nonzero take the
   branch (executing the delay slot at pc2), otherwise annul the delay
   slot and continue at pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    TCGv r_zero;
    int l1;

    l1 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
/* Unconditional branch to (pc, npc). */
static inline void gen_branch(DisasContext *dc, target_ulong pc,
                              target_ulong npc)
{
    gen_goto_tb(dc, 0, pc, npc);
}
/* Materialize a pending JUMP_PC: store npc1 into env->npc when r_cond is
   nonzero, npc2 otherwise, without leaving the current TB. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    TCGv r_zero;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_zero = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(r_zero, 0);

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);

    gen_movl_npc_im(npc1);
    gen_op_jmp_label(l2);

    gen_set_label(l1);
    gen_movl_npc_im(npc2);
    gen_set_label(l2);
}
/* call this function before using T2 as it may have been set for a jump */
static inline void flush_T2(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        /* resolve the pending two-way npc so T2 can be reused */
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        dc->npc = DYNAMIC_PC;
    }
}
/* Flush the translation-time npc into env->npc: resolve a pending
   JUMP_PC, or store a known static npc; DYNAMIC_PC is already in env. */
static inline void save_npc(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        gen_movl_npc_im(dc->npc);
    }
}
/* Flush both pc and npc into the CPU state (needed before anything that
   can raise an exception or end translation). */
static inline void save_state(DisasContext * dc)
{
    gen_jmp_im(dc->pc);
    save_npc(dc);
}
/* Advance pc to npc (delay-slot semantics).  For JUMP_PC the pending
   branch is resolved first; for dynamic npc the value is copied inside
   the CPU state; for a static npc only the translation-time pc moves. */
static inline void gen_mov_pc_npc(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
/* Advance to the next instruction inside the CPU state:
   pc = npc; npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, 4);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
}
/* Evaluate integer condition 'cond' (Bicc/Tcc 4-bit encoding) into
   r_dst as 0/1.  'cc' selects icc (0) or, on sparc64, xcc (nonzero). */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate floating-point condition 'cond' (FBfcc 4-bit encoding) into
   r_dst as 0/1.  'cc' selects which fcc field of the FSR to test; the
   offsets map fcc1/fcc2/fcc3 (FSR bits 32/34/36 and up) onto the base
   bit-10 position used by the gen_op_eval_fb* helpers. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;
    unsigned int offset;

    r_src = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_ld_tl(r_src, cpu_env, offsetof(CPUSPARCState, fsr));

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, r_src, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, r_src, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, r_src, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, r_src, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, r_src, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, r_src, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, r_src, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, r_src, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, r_src, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, r_src, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, r_src, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, r_src, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, r_src, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, r_src, offset);
        break;
    }
}
1332 #ifdef TARGET_SPARC64
1333 // Inverted logic
/* Maps the V9 BPr rcond encoding to a TCG comparison.  The table holds
   the *negation* of each condition: gen_cond_reg below initializes the
   result to 0 and branches over the "set to 1" when the table condition
   holds, so the stored condition must be the complement of the one the
   guest asked for. */
1334 static const int gen_tcg_cond_reg[8] = {
1336 TCG_COND_NE,
1337 TCG_COND_GT,
1338 TCG_COND_GE,
1340 TCG_COND_EQ,
1341 TCG_COND_LE,
1342 TCG_COND_LT,
/* Set r_dst to 1 if register value cpu_T[0] satisfies BPr condition
   <cond> (compared against zero), else 0. */
1345 static inline void gen_cond_reg(TCGv r_dst, int cond)
1347 TCGv r_zero;
1348 int l1;
1350 l1 = gen_new_label();
1351 r_zero = tcg_temp_new(TCG_TYPE_TL);
1352 tcg_gen_movi_tl(r_zero, 0);
1353 tcg_gen_mov_tl(r_dst, r_zero);
/* inverted condition: branch skips the movi 1 when cond does NOT hold */
1354 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
1355 tcg_gen_movi_tl(r_dst, 1);
1356 gen_set_label(l1);
1358 #endif
/* Emit code for Bicc / V9 BPcc.  offset is the sign-extended, shifted
   displacement; cc selects icc/xcc on SPARC64; bit 29 of insn is the
   annul bit 'a'.  BN (cond 0) and BA (cond 8) are resolved statically;
   other conditions are evaluated into cpu_T[2] at run time. */
1360 /* XXX: potentially incorrect if dynamic npc */
1361 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
1363 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1364 target_ulong target = dc->pc + offset;
1366 if (cond == 0x0) {
1367 /* unconditional not taken */
1368 if (a) {
/* annulled: the delay-slot instruction is skipped too */
1369 dc->pc = dc->npc + 4;
1370 dc->npc = dc->pc + 4;
1371 } else {
1372 dc->pc = dc->npc;
1373 dc->npc = dc->pc + 4;
1375 } else if (cond == 0x8) {
1376 /* unconditional taken */
1377 if (a) {
/* annulled taken branch: jump directly, no delay slot */
1378 dc->pc = target;
1379 dc->npc = dc->pc + 4;
1380 } else {
1381 dc->pc = dc->npc;
1382 dc->npc = target;
1384 } else {
1385 flush_T2(dc);
1386 gen_cond(cpu_T[2], cc, cond);
1387 if (a) {
1388 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1389 dc->is_br = 1;
1390 } else {
/* outcome only known at run time: npc becomes the two-valued
   JUMP_PC, resolved from jump_pc[0] (taken) / jump_pc[1] (fall
   through) depending on cpu_T[2] */
1391 dc->pc = dc->npc;
1392 dc->jump_pc[0] = target;
1393 dc->jump_pc[1] = dc->npc + 4;
1394 dc->npc = JUMP_PC;
/* Emit code for FBfcc / V9 FBPfcc.  Identical structure to do_branch
   above, but the condition is evaluated from FSR fcc field <cc> via
   gen_fcond instead of the integer flags. */
1399 /* XXX: potentially incorrect if dynamic npc */
1400 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
1402 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1403 target_ulong target = dc->pc + offset;
1405 if (cond == 0x0) {
1406 /* unconditional not taken */
1407 if (a) {
/* annulled: delay slot skipped as well */
1408 dc->pc = dc->npc + 4;
1409 dc->npc = dc->pc + 4;
1410 } else {
1411 dc->pc = dc->npc;
1412 dc->npc = dc->pc + 4;
1414 } else if (cond == 0x8) {
1415 /* unconditional taken */
1416 if (a) {
1417 dc->pc = target;
1418 dc->npc = dc->pc + 4;
1419 } else {
1420 dc->pc = dc->npc;
1421 dc->npc = target;
1423 } else {
1424 flush_T2(dc);
1425 gen_fcond(cpu_T[2], cc, cond);
1426 if (a) {
1427 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1428 dc->is_br = 1;
1429 } else {
/* run-time outcome: record both successor pcs, npc = JUMP_PC */
1430 dc->pc = dc->npc;
1431 dc->jump_pc[0] = target;
1432 dc->jump_pc[1] = dc->npc + 4;
1433 dc->npc = JUMP_PC;
1438 #ifdef TARGET_SPARC64
/* Emit code for V9 BPr (branch on contents of integer register).
   The register value has already been loaded into cpu_T[0] by the
   caller; the condition is always dynamic, so this always produces
   either an annulling branch or a JUMP_PC pair. */
1439 /* XXX: potentially incorrect if dynamic npc */
1440 static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
1442 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1443 target_ulong target = dc->pc + offset;
1445 flush_T2(dc);
1446 gen_cond_reg(cpu_T[2], cond);
1447 if (a) {
1448 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1449 dc->is_br = 1;
1450 } else {
1451 dc->pc = dc->npc;
1452 dc->jump_pc[0] = target;
1453 dc->jump_pc[1] = dc->npc + 4;
1454 dc->npc = JUMP_PC;
/* V9 fcmp helper dispatch tables, indexed by the fcc field number
   (0..3).  Entry 0 is the pre-V9-compatible helper, entries 1..3 the
   per-fcc variants.  The quad (128-bit) tables are only built for
   user-mode, matching the CONFIG_USER_ONLY guards on their users. */
1458 static GenOpFunc * const gen_fcmps[4] = {
1459 helper_fcmps,
1460 helper_fcmps_fcc1,
1461 helper_fcmps_fcc2,
1462 helper_fcmps_fcc3,
1465 static GenOpFunc * const gen_fcmpd[4] = {
1466 helper_fcmpd,
1467 helper_fcmpd_fcc1,
1468 helper_fcmpd_fcc2,
1469 helper_fcmpd_fcc3,
1472 #if defined(CONFIG_USER_ONLY)
1473 static GenOpFunc * const gen_fcmpq[4] = {
1474 helper_fcmpq,
1475 helper_fcmpq_fcc1,
1476 helper_fcmpq_fcc2,
1477 helper_fcmpq_fcc3,
1479 #endif
/* fcmpe* variants: same layout, signaling-compare helpers */
1481 static GenOpFunc * const gen_fcmpes[4] = {
1482 helper_fcmpes,
1483 helper_fcmpes_fcc1,
1484 helper_fcmpes_fcc2,
1485 helper_fcmpes_fcc3,
1488 static GenOpFunc * const gen_fcmped[4] = {
1489 helper_fcmped,
1490 helper_fcmped_fcc1,
1491 helper_fcmped_fcc2,
1492 helper_fcmped_fcc3,
1495 #if defined(CONFIG_USER_ONLY)
1496 static GenOpFunc * const gen_fcmpeq[4] = {
1497 helper_fcmpeq,
1498 helper_fcmpeq_fcc1,
1499 helper_fcmpeq_fcc2,
1500 helper_fcmpeq_fcc3,
1502 #endif
/* V9: emit a call to the fcmp helper selected by fcc field fccno
   (see the dispatch tables above). */
1504 static inline void gen_op_fcmps(int fccno)
1506 tcg_gen_helper_0_0(gen_fcmps[fccno]);
1509 static inline void gen_op_fcmpd(int fccno)
1511 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
1514 #if defined(CONFIG_USER_ONLY)
1515 static inline void gen_op_fcmpq(int fccno)
1517 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
1519 #endif
1521 static inline void gen_op_fcmpes(int fccno)
1523 tcg_gen_helper_0_0(gen_fcmpes[fccno]);
1526 static inline void gen_op_fcmped(int fccno)
1528 tcg_gen_helper_0_0(gen_fcmped[fccno]);
1531 #if defined(CONFIG_USER_ONLY)
1532 static inline void gen_op_fcmpeq(int fccno)
1534 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
1536 #endif
1538 #else
/* Pre-V9: there is only a single fcc field, so fccno is ignored and
   the plain helper is always called. */
1540 static inline void gen_op_fcmps(int fccno)
1542 tcg_gen_helper_0_0(helper_fcmps);
1545 static inline void gen_op_fcmpd(int fccno)
1547 tcg_gen_helper_0_0(helper_fcmpd);
1550 #if defined(CONFIG_USER_ONLY)
1551 static inline void gen_op_fcmpq(int fccno)
1553 tcg_gen_helper_0_0(helper_fcmpq);
1555 #endif
1557 static inline void gen_op_fcmpes(int fccno)
1559 tcg_gen_helper_0_0(helper_fcmpes);
1562 static inline void gen_op_fcmped(int fccno)
1564 tcg_gen_helper_0_0(helper_fcmped);
1567 #if defined(CONFIG_USER_ONLY)
1568 static inline void gen_op_fcmpeq(int fccno)
1570 tcg_gen_helper_0_0(helper_fcmpeq);
1572 #endif
1574 #endif
/* Replace the FSR ftt field with fsr_flags, then raise a
   floating-point exception trap (TT_FP_EXCP). */
1576 static inline void gen_op_fpexception_im(int fsr_flags)
1578 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1579 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~FSR_FTT_MASK);
1580 tcg_gen_ori_tl(cpu_tmp0, cpu_tmp0, fsr_flags);
1581 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1582 gen_op_exception(TT_FP_EXCP);
/* In system emulation, if the FPU is disabled, generate an
   fp_disabled trap (TT_NFPU_INSN), mark the TB as ended and return 1
   so the caller skips translating the instruction.  In user mode the
   FPU is always available, so this always returns 0. */
1585 static int gen_trap_ifnofpu(DisasContext * dc)
1587 #if !defined(CONFIG_USER_ONLY)
1588 if (!dc->fpu_enabled) {
1589 save_state(dc);
1590 gen_op_exception(TT_NFPU_INSN);
1591 dc->is_br = 1;
1592 return 1;
1594 #endif
1595 return 0;
/* Clear the ftt and current-exception (cexc) fields of the FSR. */
1598 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1600 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
1601 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
1602 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
/* Emit a call that clears the accumulated softfloat exception flags
   before an FP operation whose exceptions will be checked afterwards. */
1605 static inline void gen_clear_float_exceptions(void)
1607 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1610 /* asi moves */
1611 #ifdef TARGET_SPARC64
/* V9 alternate-space load.  Address is in cpu_T[0].  With the
   immediate form the 13-bit offset is added to the address and the ASI
   comes from the %asi register; otherwise the ASI is the immediate
   field of the instruction.  helper_ld_asi takes (addr, asi, size,
   sign); cpu_T[1] carries the ASI in and receives the loaded value. */
1612 static inline void gen_ld_asi(int insn, int size, int sign)
1614 int asi, offset;
1615 TCGv r_size, r_sign;
1617 r_size = tcg_temp_new(TCG_TYPE_I32);
1618 r_sign = tcg_temp_new(TCG_TYPE_I32);
1619 tcg_gen_movi_i32(r_size, size);
1620 tcg_gen_movi_i32(r_sign, sign);
1621 if (IS_IMM) {
1622 offset = GET_FIELD(insn, 25, 31);
1623 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1624 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1625 } else {
1626 asi = GET_FIELD(insn, 19, 26);
1627 tcg_gen_movi_i32(cpu_T[1], asi);
1629 tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
1630 r_sign);
/* V9 alternate-space store of cpu_T[1] to address cpu_T[0].
   helper_st_asi takes (addr, value, asi, size). */
1633 static inline void gen_st_asi(int insn, int size)
1635 int asi, offset;
1636 TCGv r_asi, r_size;
1638 r_asi = tcg_temp_new(TCG_TYPE_I32);
1639 r_size = tcg_temp_new(TCG_TYPE_I32);
1640 tcg_gen_movi_i32(r_size, size);
1641 if (IS_IMM) {
/* immediate form: add offset, ASI comes from the %asi register */
1642 offset = GET_FIELD(insn, 25, 31);
1643 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1644 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1645 } else {
1646 asi = GET_FIELD(insn, 19, 26);
1647 tcg_gen_movi_i32(r_asi, asi);
1649 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
/* V9 alternate-space FP load: helper_ldf_asi(addr, asi, size, rd)
   loads <size> bytes from [cpu_T[0]] into FP register rd. */
1652 static inline void gen_ldf_asi(int insn, int size, int rd)
1654 int asi, offset;
1655 TCGv r_asi, r_size, r_rd;
1657 r_asi = tcg_temp_new(TCG_TYPE_I32);
1658 r_size = tcg_temp_new(TCG_TYPE_I32);
1659 r_rd = tcg_temp_new(TCG_TYPE_I32);
1660 tcg_gen_movi_i32(r_size, size);
1661 tcg_gen_movi_i32(r_rd, rd);
1662 if (IS_IMM) {
1663 offset = GET_FIELD(insn, 25, 31);
1664 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1665 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1666 } else {
1667 asi = GET_FIELD(insn, 19, 26);
1668 tcg_gen_movi_i32(r_asi, asi);
1670 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
/* V9 alternate-space FP store: helper_stf_asi(addr, asi, size, rd)
   stores FP register rd (<size> bytes) to [cpu_T[0]]. */
1673 static inline void gen_stf_asi(int insn, int size, int rd)
1675 int asi, offset;
1676 TCGv r_asi, r_size, r_rd;
1678 r_asi = tcg_temp_new(TCG_TYPE_I32);
1679 r_size = tcg_temp_new(TCG_TYPE_I32);
1680 r_rd = tcg_temp_new(TCG_TYPE_I32);
1681 tcg_gen_movi_i32(r_size, size);
1682 tcg_gen_movi_i32(r_rd, rd);
1683 if (IS_IMM) {
1684 offset = GET_FIELD(insn, 25, 31);
1685 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1686 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1687 } else {
1688 asi = GET_FIELD(insn, 19, 26);
1689 tcg_gen_movi_i32(r_asi, asi);
1691 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
1694 static inline void gen_swap_asi(int insn)
1696 int asi, offset;
1697 TCGv r_size, r_sign, r_temp;
1699 r_size = tcg_temp_new(TCG_TYPE_I32);
1700 r_sign = tcg_temp_new(TCG_TYPE_I32);
1701 r_temp = tcg_temp_new(TCG_TYPE_I32);
1702 tcg_gen_movi_i32(r_size, 4);
1703 tcg_gen_movi_i32(r_sign, 0);
1704 if (IS_IMM) {
1705 offset = GET_FIELD(insn, 25, 31);
1706 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1707 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1708 } else {
1709 asi = GET_FIELD(insn, 19, 26);
1710 tcg_gen_movi_i32(cpu_T[1], asi);
1712 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1713 r_sign);
1714 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1715 tcg_gen_mov_i32(cpu_T[1], r_temp);
/* V9 LDDA: 64-bit alternate-space load.  The helper returns the full
   doubleword; the low 32 bits are truncated into cpu_T[0] and the high
   32 bits into cpu_T[1].  The caller maps these onto the rd register
   pair.  cpu_T[1] carries the ASI into the helper call. */
1718 static inline void gen_ldda_asi(int insn)
1720 int asi, offset;
1721 TCGv r_size, r_sign, r_dword;
1723 r_size = tcg_temp_new(TCG_TYPE_I32);
1724 r_sign = tcg_temp_new(TCG_TYPE_I32);
1725 r_dword = tcg_temp_new(TCG_TYPE_I64);
1726 tcg_gen_movi_i32(r_size, 8);
1727 tcg_gen_movi_i32(r_sign, 0);
1728 if (IS_IMM) {
1729 offset = GET_FIELD(insn, 25, 31);
1730 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1731 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1732 } else {
1733 asi = GET_FIELD(insn, 19, 26);
1734 tcg_gen_movi_i32(cpu_T[1], asi);
1736 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1737 r_sign);
/* split the 64-bit result: low word -> T[0], high word -> T[1] */
1738 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
1739 tcg_gen_shri_i64(r_dword, r_dword, 32);
1740 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* V9 CASA (32-bit compare-and-swap in alternate space).  r_val1 holds
   the current value of register rd; helper_cas_asi is called with
   (addr, r_val1, cpu_T[1], asi) and its result is written to cpu_T[1].
   NOTE(review): which of r_val1 / cpu_T[1] is the compare value vs the
   swap value is defined by the helper -- confirm against helper.c. */
1743 static inline void gen_cas_asi(int insn, int rd)
1745 int asi, offset;
1746 TCGv r_val1, r_asi;
1748 r_val1 = tcg_temp_new(TCG_TYPE_I32);
1749 r_asi = tcg_temp_new(TCG_TYPE_I32);
1750 gen_movl_reg_TN(rd, r_val1);
1751 if (IS_IMM) {
1752 offset = GET_FIELD(insn, 25, 31);
1753 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1754 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1755 } else {
1756 asi = GET_FIELD(insn, 19, 26);
1757 tcg_gen_movi_i32(r_asi, asi);
1759 tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1760 r_asi);
/* V9 CASXA (64-bit compare-and-swap in alternate space).  Same shape
   as gen_cas_asi above but with a 64-bit rd value temporary and the
   helper_casx_asi helper. */
1763 static inline void gen_casx_asi(int insn, int rd)
1765 int asi, offset;
1766 TCGv r_val1, r_asi;
1768 r_val1 = tcg_temp_new(TCG_TYPE_I64);
1769 r_asi = tcg_temp_new(TCG_TYPE_I32);
1770 gen_movl_reg_TN(rd, r_val1);
1771 if (IS_IMM) {
1772 offset = GET_FIELD(insn, 25, 31);
1773 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1774 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1775 } else {
1776 asi = GET_FIELD(insn, 19, 26);
1777 tcg_gen_movi_i32(r_asi, asi);
1779 tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
1780 r_asi);
1783 #elif !defined(CONFIG_USER_ONLY)
/* Pre-V9 system-mode alternate-space load: no immediate form, the ASI
   is always the instruction field.  The helper returns a 64-bit value
   which is truncated to 32 bits into cpu_T[1]. */
1785 static inline void gen_ld_asi(int insn, int size, int sign)
1787 int asi;
1788 TCGv r_size, r_sign, r_dword;
1790 r_size = tcg_temp_new(TCG_TYPE_I32);
1791 r_sign = tcg_temp_new(TCG_TYPE_I32);
1792 r_dword = tcg_temp_new(TCG_TYPE_I64);
1793 tcg_gen_movi_i32(r_size, size);
1794 tcg_gen_movi_i32(r_sign, sign);
1795 asi = GET_FIELD(insn, 19, 26);
1796 tcg_gen_movi_i32(cpu_T[1], asi);
1797 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1798 r_sign);
1799 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
/* Pre-V9 system-mode alternate-space store: widen cpu_T[1] to 64 bits
   (helper_st_asi takes a 64-bit value here) and call the helper with
   (addr, value, asi, size). */
1802 static inline void gen_st_asi(int insn, int size)
1804 int asi;
1805 TCGv r_dword, r_asi, r_size;
1807 r_dword = tcg_temp_new(TCG_TYPE_I64);
1808 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
1809 r_asi = tcg_temp_new(TCG_TYPE_I32);
1810 r_size = tcg_temp_new(TCG_TYPE_I32);
1811 asi = GET_FIELD(insn, 19, 26);
1812 tcg_gen_movi_i32(r_asi, asi);
1813 tcg_gen_movi_i32(r_size, size);
1814 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
1817 static inline void gen_swap_asi(int insn)
1819 int asi;
1820 TCGv r_size, r_sign, r_temp;
1822 r_size = tcg_temp_new(TCG_TYPE_I32);
1823 r_sign = tcg_temp_new(TCG_TYPE_I32);
1824 r_temp = tcg_temp_new(TCG_TYPE_I32);
1825 tcg_gen_movi_i32(r_size, 4);
1826 tcg_gen_movi_i32(r_sign, 0);
1827 asi = GET_FIELD(insn, 19, 26);
1828 tcg_gen_movi_i32(cpu_T[1], asi);
1829 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1830 r_sign);
1831 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1832 tcg_gen_mov_i32(cpu_T[1], r_temp);
/* Pre-V9 system-mode LDDA: 64-bit alternate-space load; low word of
   the helper result goes to cpu_T[0], high word to cpu_T[1].  The
   caller maps these onto the rd register pair. */
1835 static inline void gen_ldda_asi(int insn)
1837 int asi;
1838 TCGv r_size, r_sign, r_dword;
1840 r_size = tcg_temp_new(TCG_TYPE_I32);
1841 r_sign = tcg_temp_new(TCG_TYPE_I32);
1842 r_dword = tcg_temp_new(TCG_TYPE_I64);
1843 tcg_gen_movi_i32(r_size, 8);
1844 tcg_gen_movi_i32(r_sign, 0);
1845 asi = GET_FIELD(insn, 19, 26);
1846 tcg_gen_movi_i32(cpu_T[1], asi);
1847 tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
1848 r_sign);
1849 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
1850 tcg_gen_shri_i64(r_dword, r_dword, 32);
1851 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
1853 #endif
1855 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at [cpu_T[0]] (result lands in cpu_T[1] via
   gen_ld_asi), then store 0xff to the same location.
   NOTE(review): r_dword is allocated as TCG_TYPE_I64 but written with
   tcg_gen_movi_i32 -- confirm this width mismatch is intended for the
   helper_st_asi value argument.
   NOTE(review): on TARGET_SPARC64 with IS_IMM, gen_ld_asi uses the
   %asi register (and adjusts the address), but the store below always
   re-reads the ASI from insn bits 19..26 -- verify for the immediate
   form. */
1856 static inline void gen_ldstub_asi(int insn)
1858 int asi;
1859 TCGv r_dword, r_asi, r_size;
1861 gen_ld_asi(insn, 1, 0);
1863 r_dword = tcg_temp_new(TCG_TYPE_I64);
1864 r_asi = tcg_temp_new(TCG_TYPE_I32);
1865 r_size = tcg_temp_new(TCG_TYPE_I32);
1866 asi = GET_FIELD(insn, 19, 26);
1867 tcg_gen_movi_i32(r_dword, 0xff);
1868 tcg_gen_movi_i32(r_asi, asi);
1869 tcg_gen_movi_i32(r_size, 1);
1870 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
1872 #endif
1874 /* before an instruction, dc->pc must be static */
1875 static void disas_sparc_insn(DisasContext * dc)
1877 unsigned int insn, opc, rs1, rs2, rd;
1879 insn = ldl_code(dc->pc);
1880 opc = GET_FIELD(insn, 0, 1);
1882 rd = GET_FIELD(insn, 2, 6);
1883 switch (opc) {
1884 case 0: /* branches/sethi */
1886 unsigned int xop = GET_FIELD(insn, 7, 9);
1887 int32_t target;
1888 switch (xop) {
1889 #ifdef TARGET_SPARC64
1890 case 0x1: /* V9 BPcc */
1892 int cc;
1894 target = GET_FIELD_SP(insn, 0, 18);
1895 target = sign_extend(target, 18);
1896 target <<= 2;
1897 cc = GET_FIELD_SP(insn, 20, 21);
1898 if (cc == 0)
1899 do_branch(dc, target, insn, 0);
1900 else if (cc == 2)
1901 do_branch(dc, target, insn, 1);
1902 else
1903 goto illegal_insn;
1904 goto jmp_insn;
1906 case 0x3: /* V9 BPr */
1908 target = GET_FIELD_SP(insn, 0, 13) |
1909 (GET_FIELD_SP(insn, 20, 21) << 14);
1910 target = sign_extend(target, 16);
1911 target <<= 2;
1912 rs1 = GET_FIELD(insn, 13, 17);
1913 gen_movl_reg_T0(rs1);
1914 do_branch_reg(dc, target, insn);
1915 goto jmp_insn;
1917 case 0x5: /* V9 FBPcc */
1919 int cc = GET_FIELD_SP(insn, 20, 21);
1920 if (gen_trap_ifnofpu(dc))
1921 goto jmp_insn;
1922 target = GET_FIELD_SP(insn, 0, 18);
1923 target = sign_extend(target, 19);
1924 target <<= 2;
1925 do_fbranch(dc, target, insn, cc);
1926 goto jmp_insn;
1928 #else
1929 case 0x7: /* CBN+x */
1931 goto ncp_insn;
1933 #endif
1934 case 0x2: /* BN+x */
1936 target = GET_FIELD(insn, 10, 31);
1937 target = sign_extend(target, 22);
1938 target <<= 2;
1939 do_branch(dc, target, insn, 0);
1940 goto jmp_insn;
1942 case 0x6: /* FBN+x */
1944 if (gen_trap_ifnofpu(dc))
1945 goto jmp_insn;
1946 target = GET_FIELD(insn, 10, 31);
1947 target = sign_extend(target, 22);
1948 target <<= 2;
1949 do_fbranch(dc, target, insn, 0);
1950 goto jmp_insn;
1952 case 0x4: /* SETHI */
1953 #define OPTIM
1954 #if defined(OPTIM)
1955 if (rd) { // nop
1956 #endif
1957 uint32_t value = GET_FIELD(insn, 10, 31);
1958 tcg_gen_movi_tl(cpu_T[0], value << 10);
1959 gen_movl_T0_reg(rd);
1960 #if defined(OPTIM)
1962 #endif
1963 break;
1964 case 0x0: /* UNIMPL */
1965 default:
1966 goto illegal_insn;
1968 break;
1970 break;
1971 case 1:
1972 /*CALL*/ {
1973 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1975 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1976 gen_movl_T0_reg(15);
1977 target += dc->pc;
1978 gen_mov_pc_npc(dc);
1979 dc->npc = target;
1981 goto jmp_insn;
1982 case 2: /* FPU & Logical Operations */
1984 unsigned int xop = GET_FIELD(insn, 7, 12);
1985 if (xop == 0x3a) { /* generate trap */
1986 int cond;
1988 rs1 = GET_FIELD(insn, 13, 17);
1989 gen_movl_reg_T0(rs1);
1990 if (IS_IMM) {
1991 rs2 = GET_FIELD(insn, 25, 31);
1992 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
1993 } else {
1994 rs2 = GET_FIELD(insn, 27, 31);
1995 #if defined(OPTIM)
1996 if (rs2 != 0) {
1997 #endif
1998 gen_movl_reg_T1(rs2);
1999 gen_op_add_T1_T0();
2000 #if defined(OPTIM)
2002 #endif
2004 cond = GET_FIELD(insn, 3, 6);
2005 if (cond == 0x8) {
2006 save_state(dc);
2007 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
2008 } else if (cond != 0) {
2009 #ifdef TARGET_SPARC64
2010 /* V9 icc/xcc */
2011 int cc = GET_FIELD_SP(insn, 11, 12);
2012 flush_T2(dc);
2013 save_state(dc);
2014 if (cc == 0)
2015 gen_cond(cpu_T[2], 0, cond);
2016 else if (cc == 2)
2017 gen_cond(cpu_T[2], 1, cond);
2018 else
2019 goto illegal_insn;
2020 #else
2021 flush_T2(dc);
2022 save_state(dc);
2023 gen_cond(cpu_T[2], 0, cond);
2024 #endif
2025 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], cpu_T[2]);
2027 gen_op_next_insn();
2028 tcg_gen_exit_tb(0);
2029 dc->is_br = 1;
2030 goto jmp_insn;
2031 } else if (xop == 0x28) {
2032 rs1 = GET_FIELD(insn, 13, 17);
2033 switch(rs1) {
2034 case 0: /* rdy */
2035 #ifndef TARGET_SPARC64
2036 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2037 manual, rdy on the microSPARC
2038 II */
2039 case 0x0f: /* stbar in the SPARCv8 manual,
2040 rdy on the microSPARC II */
2041 case 0x10 ... 0x1f: /* implementation-dependent in the
2042 SPARCv8 manual, rdy on the
2043 microSPARC II */
2044 #endif
2045 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
2046 gen_movl_T0_reg(rd);
2047 break;
2048 #ifdef TARGET_SPARC64
2049 case 0x2: /* V9 rdccr */
2050 gen_op_rdccr();
2051 gen_movl_T0_reg(rd);
2052 break;
2053 case 0x3: /* V9 rdasi */
2054 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
2055 gen_movl_T0_reg(rd);
2056 break;
2057 case 0x4: /* V9 rdtick */
2059 TCGv r_tickptr;
2061 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2062 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2063 offsetof(CPUState, tick));
2064 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2065 r_tickptr);
2066 gen_movl_T0_reg(rd);
2068 break;
2069 case 0x5: /* V9 rdpc */
2070 tcg_gen_movi_tl(cpu_T[0], dc->pc);
2071 gen_movl_T0_reg(rd);
2072 break;
2073 case 0x6: /* V9 rdfprs */
2074 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
2075 gen_movl_T0_reg(rd);
2076 break;
2077 case 0xf: /* V9 membar */
2078 break; /* no effect */
2079 case 0x13: /* Graphics Status */
2080 if (gen_trap_ifnofpu(dc))
2081 goto jmp_insn;
2082 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
2083 gen_movl_T0_reg(rd);
2084 break;
2085 case 0x17: /* Tick compare */
2086 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
2087 gen_movl_T0_reg(rd);
2088 break;
2089 case 0x18: /* System tick */
2091 TCGv r_tickptr;
2093 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2094 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2095 offsetof(CPUState, stick));
2096 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2097 r_tickptr);
2098 gen_movl_T0_reg(rd);
2100 break;
2101 case 0x19: /* System tick compare */
2102 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
2103 gen_movl_T0_reg(rd);
2104 break;
2105 case 0x10: /* Performance Control */
2106 case 0x11: /* Performance Instrumentation Counter */
2107 case 0x12: /* Dispatch Control */
2108 case 0x14: /* Softint set, WO */
2109 case 0x15: /* Softint clear, WO */
2110 case 0x16: /* Softint write */
2111 #endif
2112 default:
2113 goto illegal_insn;
2115 #if !defined(CONFIG_USER_ONLY)
2116 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2117 #ifndef TARGET_SPARC64
2118 if (!supervisor(dc))
2119 goto priv_insn;
2120 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
2121 #else
2122 if (!hypervisor(dc))
2123 goto priv_insn;
2124 rs1 = GET_FIELD(insn, 13, 17);
2125 switch (rs1) {
2126 case 0: // hpstate
2127 // gen_op_rdhpstate();
2128 break;
2129 case 1: // htstate
2130 // gen_op_rdhtstate();
2131 break;
2132 case 3: // hintp
2133 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
2134 break;
2135 case 5: // htba
2136 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
2137 break;
2138 case 6: // hver
2139 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
2140 break;
2141 case 31: // hstick_cmpr
2142 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
2143 break;
2144 default:
2145 goto illegal_insn;
2147 #endif
2148 gen_movl_T0_reg(rd);
2149 break;
2150 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2151 if (!supervisor(dc))
2152 goto priv_insn;
2153 #ifdef TARGET_SPARC64
2154 rs1 = GET_FIELD(insn, 13, 17);
2155 switch (rs1) {
2156 case 0: // tpc
2158 TCGv r_tsptr;
2160 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2161 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2162 offsetof(CPUState, tsptr));
2163 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2164 offsetof(trap_state, tpc));
2166 break;
2167 case 1: // tnpc
2169 TCGv r_tsptr;
2171 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2172 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2173 offsetof(CPUState, tsptr));
2174 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2175 offsetof(trap_state, tnpc));
2177 break;
2178 case 2: // tstate
2180 TCGv r_tsptr;
2182 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2183 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2184 offsetof(CPUState, tsptr));
2185 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2186 offsetof(trap_state, tstate));
2188 break;
2189 case 3: // tt
2191 TCGv r_tsptr;
2193 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2194 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2195 offsetof(CPUState, tsptr));
2196 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
2197 offsetof(trap_state, tt));
2199 break;
2200 case 4: // tick
2202 TCGv r_tickptr;
2204 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2205 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2206 offsetof(CPUState, tick));
2207 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2208 r_tickptr);
2209 gen_movl_T0_reg(rd);
2211 break;
2212 case 5: // tba
2213 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2214 break;
2215 case 6: // pstate
2216 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
2217 break;
2218 case 7: // tl
2219 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
2220 break;
2221 case 8: // pil
2222 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
2223 break;
2224 case 9: // cwp
2225 gen_op_rdcwp();
2226 break;
2227 case 10: // cansave
2228 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
2229 break;
2230 case 11: // canrestore
2231 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
2232 break;
2233 case 12: // cleanwin
2234 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
2235 break;
2236 case 13: // otherwin
2237 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
2238 break;
2239 case 14: // wstate
2240 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
2241 break;
2242 case 16: // UA2005 gl
2243 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
2244 break;
2245 case 26: // UA2005 strand status
2246 if (!hypervisor(dc))
2247 goto priv_insn;
2248 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
2249 break;
2250 case 31: // ver
2251 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
2252 break;
2253 case 15: // fq
2254 default:
2255 goto illegal_insn;
2257 #else
2258 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
2259 #endif
2260 gen_movl_T0_reg(rd);
2261 break;
2262 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2263 #ifdef TARGET_SPARC64
2264 gen_op_flushw();
2265 #else
2266 if (!supervisor(dc))
2267 goto priv_insn;
2268 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2269 gen_movl_T0_reg(rd);
2270 #endif
2271 break;
2272 #endif
2273 } else if (xop == 0x34) { /* FPU Operations */
2274 if (gen_trap_ifnofpu(dc))
2275 goto jmp_insn;
2276 gen_op_clear_ieee_excp_and_FTT();
2277 rs1 = GET_FIELD(insn, 13, 17);
2278 rs2 = GET_FIELD(insn, 27, 31);
2279 xop = GET_FIELD(insn, 18, 26);
2280 switch (xop) {
2281 case 0x1: /* fmovs */
2282 gen_op_load_fpr_FT0(rs2);
2283 gen_op_store_FT0_fpr(rd);
2284 break;
2285 case 0x5: /* fnegs */
2286 gen_op_load_fpr_FT1(rs2);
2287 gen_op_fnegs();
2288 gen_op_store_FT0_fpr(rd);
2289 break;
2290 case 0x9: /* fabss */
2291 gen_op_load_fpr_FT1(rs2);
2292 tcg_gen_helper_0_0(helper_fabss);
2293 gen_op_store_FT0_fpr(rd);
2294 break;
2295 case 0x29: /* fsqrts */
2296 gen_op_load_fpr_FT1(rs2);
2297 gen_clear_float_exceptions();
2298 tcg_gen_helper_0_0(helper_fsqrts);
2299 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2300 gen_op_store_FT0_fpr(rd);
2301 break;
2302 case 0x2a: /* fsqrtd */
2303 gen_op_load_fpr_DT1(DFPREG(rs2));
2304 gen_clear_float_exceptions();
2305 tcg_gen_helper_0_0(helper_fsqrtd);
2306 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2307 gen_op_store_DT0_fpr(DFPREG(rd));
2308 break;
2309 case 0x2b: /* fsqrtq */
2310 #if defined(CONFIG_USER_ONLY)
2311 gen_op_load_fpr_QT1(QFPREG(rs2));
2312 gen_clear_float_exceptions();
2313 tcg_gen_helper_0_0(helper_fsqrtq);
2314 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2315 gen_op_store_QT0_fpr(QFPREG(rd));
2316 break;
2317 #else
2318 goto nfpu_insn;
2319 #endif
2320 case 0x41:
2321 gen_op_load_fpr_FT0(rs1);
2322 gen_op_load_fpr_FT1(rs2);
2323 gen_clear_float_exceptions();
2324 gen_op_fadds();
2325 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2326 gen_op_store_FT0_fpr(rd);
2327 break;
2328 case 0x42:
2329 gen_op_load_fpr_DT0(DFPREG(rs1));
2330 gen_op_load_fpr_DT1(DFPREG(rs2));
2331 gen_clear_float_exceptions();
2332 gen_op_faddd();
2333 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2334 gen_op_store_DT0_fpr(DFPREG(rd));
2335 break;
2336 case 0x43: /* faddq */
2337 #if defined(CONFIG_USER_ONLY)
2338 gen_op_load_fpr_QT0(QFPREG(rs1));
2339 gen_op_load_fpr_QT1(QFPREG(rs2));
2340 gen_clear_float_exceptions();
2341 gen_op_faddq();
2342 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2343 gen_op_store_QT0_fpr(QFPREG(rd));
2344 break;
2345 #else
2346 goto nfpu_insn;
2347 #endif
2348 case 0x45:
2349 gen_op_load_fpr_FT0(rs1);
2350 gen_op_load_fpr_FT1(rs2);
2351 gen_clear_float_exceptions();
2352 gen_op_fsubs();
2353 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2354 gen_op_store_FT0_fpr(rd);
2355 break;
2356 case 0x46:
2357 gen_op_load_fpr_DT0(DFPREG(rs1));
2358 gen_op_load_fpr_DT1(DFPREG(rs2));
2359 gen_clear_float_exceptions();
2360 gen_op_fsubd();
2361 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2362 gen_op_store_DT0_fpr(DFPREG(rd));
2363 break;
2364 case 0x47: /* fsubq */
2365 #if defined(CONFIG_USER_ONLY)
2366 gen_op_load_fpr_QT0(QFPREG(rs1));
2367 gen_op_load_fpr_QT1(QFPREG(rs2));
2368 gen_clear_float_exceptions();
2369 gen_op_fsubq();
2370 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2371 gen_op_store_QT0_fpr(QFPREG(rd));
2372 break;
2373 #else
2374 goto nfpu_insn;
2375 #endif
2376 case 0x49:
2377 gen_op_load_fpr_FT0(rs1);
2378 gen_op_load_fpr_FT1(rs2);
2379 gen_clear_float_exceptions();
2380 gen_op_fmuls();
2381 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2382 gen_op_store_FT0_fpr(rd);
2383 break;
2384 case 0x4a:
2385 gen_op_load_fpr_DT0(DFPREG(rs1));
2386 gen_op_load_fpr_DT1(DFPREG(rs2));
2387 gen_clear_float_exceptions();
2388 gen_op_fmuld();
2389 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2390 gen_op_store_DT0_fpr(DFPREG(rd));
2391 break;
2392 case 0x4b: /* fmulq */
2393 #if defined(CONFIG_USER_ONLY)
2394 gen_op_load_fpr_QT0(QFPREG(rs1));
2395 gen_op_load_fpr_QT1(QFPREG(rs2));
2396 gen_clear_float_exceptions();
2397 gen_op_fmulq();
2398 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2399 gen_op_store_QT0_fpr(QFPREG(rd));
2400 break;
2401 #else
2402 goto nfpu_insn;
2403 #endif
2404 case 0x4d:
2405 gen_op_load_fpr_FT0(rs1);
2406 gen_op_load_fpr_FT1(rs2);
2407 gen_clear_float_exceptions();
2408 gen_op_fdivs();
2409 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2410 gen_op_store_FT0_fpr(rd);
2411 break;
2412 case 0x4e:
2413 gen_op_load_fpr_DT0(DFPREG(rs1));
2414 gen_op_load_fpr_DT1(DFPREG(rs2));
2415 gen_clear_float_exceptions();
2416 gen_op_fdivd();
2417 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2418 gen_op_store_DT0_fpr(DFPREG(rd));
2419 break;
2420 case 0x4f: /* fdivq */
2421 #if defined(CONFIG_USER_ONLY)
2422 gen_op_load_fpr_QT0(QFPREG(rs1));
2423 gen_op_load_fpr_QT1(QFPREG(rs2));
2424 gen_clear_float_exceptions();
2425 gen_op_fdivq();
2426 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2427 gen_op_store_QT0_fpr(QFPREG(rd));
2428 break;
2429 #else
2430 goto nfpu_insn;
2431 #endif
2432 case 0x69:
2433 gen_op_load_fpr_FT0(rs1);
2434 gen_op_load_fpr_FT1(rs2);
2435 gen_clear_float_exceptions();
2436 gen_op_fsmuld();
2437 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2438 gen_op_store_DT0_fpr(DFPREG(rd));
2439 break;
2440 case 0x6e: /* fdmulq */
2441 #if defined(CONFIG_USER_ONLY)
2442 gen_op_load_fpr_DT0(DFPREG(rs1));
2443 gen_op_load_fpr_DT1(DFPREG(rs2));
2444 gen_clear_float_exceptions();
2445 gen_op_fdmulq();
2446 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2447 gen_op_store_QT0_fpr(QFPREG(rd));
2448 break;
2449 #else
2450 goto nfpu_insn;
2451 #endif
2452 case 0xc4:
2453 gen_op_load_fpr_FT1(rs2);
2454 gen_clear_float_exceptions();
2455 gen_op_fitos();
2456 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2457 gen_op_store_FT0_fpr(rd);
2458 break;
2459 case 0xc6:
2460 gen_op_load_fpr_DT1(DFPREG(rs2));
2461 gen_clear_float_exceptions();
2462 gen_op_fdtos();
2463 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2464 gen_op_store_FT0_fpr(rd);
2465 break;
2466 case 0xc7: /* fqtos */
2467 #if defined(CONFIG_USER_ONLY)
2468 gen_op_load_fpr_QT1(QFPREG(rs2));
2469 gen_clear_float_exceptions();
2470 gen_op_fqtos();
2471 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2472 gen_op_store_FT0_fpr(rd);
2473 break;
2474 #else
2475 goto nfpu_insn;
2476 #endif
2477 case 0xc8:
2478 gen_op_load_fpr_FT1(rs2);
2479 gen_op_fitod();
2480 gen_op_store_DT0_fpr(DFPREG(rd));
2481 break;
2482 case 0xc9:
2483 gen_op_load_fpr_FT1(rs2);
2484 gen_op_fstod();
2485 gen_op_store_DT0_fpr(DFPREG(rd));
2486 break;
2487 case 0xcb: /* fqtod */
2488 #if defined(CONFIG_USER_ONLY)
2489 gen_op_load_fpr_QT1(QFPREG(rs2));
2490 gen_clear_float_exceptions();
2491 gen_op_fqtod();
2492 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2493 gen_op_store_DT0_fpr(DFPREG(rd));
2494 break;
2495 #else
2496 goto nfpu_insn;
2497 #endif
2498 case 0xcc: /* fitoq */
2499 #if defined(CONFIG_USER_ONLY)
2500 gen_op_load_fpr_FT1(rs2);
2501 gen_op_fitoq();
2502 gen_op_store_QT0_fpr(QFPREG(rd));
2503 break;
2504 #else
2505 goto nfpu_insn;
2506 #endif
2507 case 0xcd: /* fstoq */
2508 #if defined(CONFIG_USER_ONLY)
2509 gen_op_load_fpr_FT1(rs2);
2510 gen_op_fstoq();
2511 gen_op_store_QT0_fpr(QFPREG(rd));
2512 break;
2513 #else
2514 goto nfpu_insn;
2515 #endif
2516 case 0xce: /* fdtoq */
2517 #if defined(CONFIG_USER_ONLY)
2518 gen_op_load_fpr_DT1(DFPREG(rs2));
2519 gen_op_fdtoq();
2520 gen_op_store_QT0_fpr(QFPREG(rd));
2521 break;
2522 #else
2523 goto nfpu_insn;
2524 #endif
2525 case 0xd1:
2526 gen_op_load_fpr_FT1(rs2);
2527 gen_clear_float_exceptions();
2528 gen_op_fstoi();
2529 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2530 gen_op_store_FT0_fpr(rd);
2531 break;
2532 case 0xd2:
2533 gen_op_load_fpr_DT1(DFPREG(rs2));
2534 gen_clear_float_exceptions();
2535 gen_op_fdtoi();
2536 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2537 gen_op_store_FT0_fpr(rd);
2538 break;
2539 case 0xd3: /* fqtoi */
2540 #if defined(CONFIG_USER_ONLY)
2541 gen_op_load_fpr_QT1(QFPREG(rs2));
2542 gen_clear_float_exceptions();
2543 gen_op_fqtoi();
2544 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2545 gen_op_store_FT0_fpr(rd);
2546 break;
2547 #else
2548 goto nfpu_insn;
2549 #endif
2550 #ifdef TARGET_SPARC64
2551 case 0x2: /* V9 fmovd */
2552 gen_op_load_fpr_DT0(DFPREG(rs2));
2553 gen_op_store_DT0_fpr(DFPREG(rd));
2554 break;
2555 case 0x3: /* V9 fmovq */
2556 #if defined(CONFIG_USER_ONLY)
2557 gen_op_load_fpr_QT0(QFPREG(rs2));
2558 gen_op_store_QT0_fpr(QFPREG(rd));
2559 break;
2560 #else
2561 goto nfpu_insn;
2562 #endif
2563 case 0x6: /* V9 fnegd */
2564 gen_op_load_fpr_DT1(DFPREG(rs2));
2565 gen_op_fnegd();
2566 gen_op_store_DT0_fpr(DFPREG(rd));
2567 break;
2568 case 0x7: /* V9 fnegq */
2569 #if defined(CONFIG_USER_ONLY)
2570 gen_op_load_fpr_QT1(QFPREG(rs2));
2571 gen_op_fnegq();
2572 gen_op_store_QT0_fpr(QFPREG(rd));
2573 break;
2574 #else
2575 goto nfpu_insn;
2576 #endif
2577 case 0xa: /* V9 fabsd */
2578 gen_op_load_fpr_DT1(DFPREG(rs2));
2579 tcg_gen_helper_0_0(helper_fabsd);
2580 gen_op_store_DT0_fpr(DFPREG(rd));
2581 break;
2582 case 0xb: /* V9 fabsq */
2583 #if defined(CONFIG_USER_ONLY)
2584 gen_op_load_fpr_QT1(QFPREG(rs2));
2585 tcg_gen_helper_0_0(helper_fabsq);
2586 gen_op_store_QT0_fpr(QFPREG(rd));
2587 break;
2588 #else
2589 goto nfpu_insn;
2590 #endif
2591 case 0x81: /* V9 fstox */
2592 gen_op_load_fpr_FT1(rs2);
2593 gen_clear_float_exceptions();
2594 gen_op_fstox();
2595 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2596 gen_op_store_DT0_fpr(DFPREG(rd));
2597 break;
2598 case 0x82: /* V9 fdtox */
2599 gen_op_load_fpr_DT1(DFPREG(rs2));
2600 gen_clear_float_exceptions();
2601 gen_op_fdtox();
2602 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2603 gen_op_store_DT0_fpr(DFPREG(rd));
2604 break;
2605 case 0x83: /* V9 fqtox */
2606 #if defined(CONFIG_USER_ONLY)
2607 gen_op_load_fpr_QT1(QFPREG(rs2));
2608 gen_clear_float_exceptions();
2609 gen_op_fqtox();
2610 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2611 gen_op_store_DT0_fpr(DFPREG(rd));
2612 break;
2613 #else
2614 goto nfpu_insn;
2615 #endif
2616 case 0x84: /* V9 fxtos */
2617 gen_op_load_fpr_DT1(DFPREG(rs2));
2618 gen_clear_float_exceptions();
2619 gen_op_fxtos();
2620 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2621 gen_op_store_FT0_fpr(rd);
2622 break;
2623 case 0x88: /* V9 fxtod */
2624 gen_op_load_fpr_DT1(DFPREG(rs2));
2625 gen_clear_float_exceptions();
2626 gen_op_fxtod();
2627 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2628 gen_op_store_DT0_fpr(DFPREG(rd));
2629 break;
2630 case 0x8c: /* V9 fxtoq */
2631 #if defined(CONFIG_USER_ONLY)
2632 gen_op_load_fpr_DT1(DFPREG(rs2));
2633 gen_clear_float_exceptions();
2634 gen_op_fxtoq();
2635 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2636 gen_op_store_QT0_fpr(QFPREG(rd));
2637 break;
2638 #else
2639 goto nfpu_insn;
2640 #endif
2641 #endif
2642 default:
2643 goto illegal_insn;
2645 } else if (xop == 0x35) { /* FPU Operations */
2646 #ifdef TARGET_SPARC64
2647 int cond;
2648 #endif
2649 if (gen_trap_ifnofpu(dc))
2650 goto jmp_insn;
2651 gen_op_clear_ieee_excp_and_FTT();
2652 rs1 = GET_FIELD(insn, 13, 17);
2653 rs2 = GET_FIELD(insn, 27, 31);
2654 xop = GET_FIELD(insn, 18, 26);
2655 #ifdef TARGET_SPARC64
2656 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2657 TCGv r_zero;
2658 int l1;
2660 l1 = gen_new_label();
2661 r_zero = tcg_temp_new(TCG_TYPE_TL);
2662 cond = GET_FIELD_SP(insn, 14, 17);
2663 rs1 = GET_FIELD(insn, 13, 17);
2664 gen_movl_reg_T0(rs1);
2665 tcg_gen_movi_tl(r_zero, 0);
2666 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2667 gen_op_load_fpr_FT0(rs2);
2668 gen_op_store_FT0_fpr(rd);
2669 gen_set_label(l1);
2670 break;
2671 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2672 TCGv r_zero;
2673 int l1;
2675 l1 = gen_new_label();
2676 r_zero = tcg_temp_new(TCG_TYPE_TL);
2677 cond = GET_FIELD_SP(insn, 14, 17);
2678 rs1 = GET_FIELD(insn, 13, 17);
2679 gen_movl_reg_T0(rs1);
2680 tcg_gen_movi_tl(r_zero, 0);
2681 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2682 gen_op_load_fpr_DT0(DFPREG(rs2));
2683 gen_op_store_DT0_fpr(DFPREG(rd));
2684 gen_set_label(l1);
2685 break;
2686 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2687 #if defined(CONFIG_USER_ONLY)
2688 TCGv r_zero;
2689 int l1;
2691 l1 = gen_new_label();
2692 r_zero = tcg_temp_new(TCG_TYPE_TL);
2693 cond = GET_FIELD_SP(insn, 14, 17);
2694 rs1 = GET_FIELD(insn, 13, 17);
2695 gen_movl_reg_T0(rs1);
2696 tcg_gen_movi_tl(r_zero, 0);
2697 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2698 gen_op_load_fpr_QT0(QFPREG(rs2));
2699 gen_op_store_QT0_fpr(QFPREG(rd));
2700 gen_set_label(l1);
2701 break;
2702 #else
2703 goto nfpu_insn;
2704 #endif
2706 #endif
2707 switch (xop) {
2708 #ifdef TARGET_SPARC64
2709 #define FMOVCC(size_FDQ, fcc) \
2711 TCGv r_zero, r_cond; \
2712 int l1; \
2714 l1 = gen_new_label(); \
2715 r_zero = tcg_temp_new(TCG_TYPE_TL); \
2716 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2717 tcg_gen_movi_tl(r_zero, 0); \
2718 cond = GET_FIELD_SP(insn, 14, 17); \
2719 gen_fcond(r_cond, fcc, cond); \
2720 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2721 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2722 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2723 gen_set_label(l1); \
2725 case 0x001: /* V9 fmovscc %fcc0 */
2726 FMOVCC(F, 0);
2727 break;
2728 case 0x002: /* V9 fmovdcc %fcc0 */
2729 FMOVCC(D, 0);
2730 break;
2731 case 0x003: /* V9 fmovqcc %fcc0 */
2732 #if defined(CONFIG_USER_ONLY)
2733 FMOVCC(Q, 0);
2734 break;
2735 #else
2736 goto nfpu_insn;
2737 #endif
2738 case 0x041: /* V9 fmovscc %fcc1 */
2739 FMOVCC(F, 1);
2740 break;
2741 case 0x042: /* V9 fmovdcc %fcc1 */
2742 FMOVCC(D, 1);
2743 break;
2744 case 0x043: /* V9 fmovqcc %fcc1 */
2745 #if defined(CONFIG_USER_ONLY)
2746 FMOVCC(Q, 1);
2747 break;
2748 #else
2749 goto nfpu_insn;
2750 #endif
2751 case 0x081: /* V9 fmovscc %fcc2 */
2752 FMOVCC(F, 2);
2753 break;
2754 case 0x082: /* V9 fmovdcc %fcc2 */
2755 FMOVCC(D, 2);
2756 break;
2757 case 0x083: /* V9 fmovqcc %fcc2 */
2758 #if defined(CONFIG_USER_ONLY)
2759 FMOVCC(Q, 2);
2760 break;
2761 #else
2762 goto nfpu_insn;
2763 #endif
2764 case 0x0c1: /* V9 fmovscc %fcc3 */
2765 FMOVCC(F, 3);
2766 break;
2767 case 0x0c2: /* V9 fmovdcc %fcc3 */
2768 FMOVCC(D, 3);
2769 break;
2770 case 0x0c3: /* V9 fmovqcc %fcc3 */
2771 #if defined(CONFIG_USER_ONLY)
2772 FMOVCC(Q, 3);
2773 break;
2774 #else
2775 goto nfpu_insn;
2776 #endif
2777 #undef FMOVCC
2778 #define FMOVCC(size_FDQ, icc) \
2780 TCGv r_zero, r_cond; \
2781 int l1; \
2783 l1 = gen_new_label(); \
2784 r_zero = tcg_temp_new(TCG_TYPE_TL); \
2785 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2786 tcg_gen_movi_tl(r_zero, 0); \
2787 cond = GET_FIELD_SP(insn, 14, 17); \
2788 gen_cond(r_cond, icc, cond); \
2789 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2790 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2791 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2792 gen_set_label(l1); \
2795 case 0x101: /* V9 fmovscc %icc */
2796 FMOVCC(F, 0);
2797 break;
2798 case 0x102: /* V9 fmovdcc %icc */
2799 FMOVCC(D, 0);
2800 case 0x103: /* V9 fmovqcc %icc */
2801 #if defined(CONFIG_USER_ONLY)
2802 FMOVCC(D, 0);
2803 break;
2804 #else
2805 goto nfpu_insn;
2806 #endif
2807 case 0x181: /* V9 fmovscc %xcc */
2808 FMOVCC(F, 1);
2809 break;
2810 case 0x182: /* V9 fmovdcc %xcc */
2811 FMOVCC(D, 1);
2812 break;
2813 case 0x183: /* V9 fmovqcc %xcc */
2814 #if defined(CONFIG_USER_ONLY)
2815 FMOVCC(Q, 1);
2816 break;
2817 #else
2818 goto nfpu_insn;
2819 #endif
2820 #undef FMOVCC
2821 #endif
2822 case 0x51: /* fcmps, V9 %fcc */
2823 gen_op_load_fpr_FT0(rs1);
2824 gen_op_load_fpr_FT1(rs2);
2825 gen_op_fcmps(rd & 3);
2826 break;
2827 case 0x52: /* fcmpd, V9 %fcc */
2828 gen_op_load_fpr_DT0(DFPREG(rs1));
2829 gen_op_load_fpr_DT1(DFPREG(rs2));
2830 gen_op_fcmpd(rd & 3);
2831 break;
2832 case 0x53: /* fcmpq, V9 %fcc */
2833 #if defined(CONFIG_USER_ONLY)
2834 gen_op_load_fpr_QT0(QFPREG(rs1));
2835 gen_op_load_fpr_QT1(QFPREG(rs2));
2836 gen_op_fcmpq(rd & 3);
2837 break;
2838 #else /* !defined(CONFIG_USER_ONLY) */
2839 goto nfpu_insn;
2840 #endif
2841 case 0x55: /* fcmpes, V9 %fcc */
2842 gen_op_load_fpr_FT0(rs1);
2843 gen_op_load_fpr_FT1(rs2);
2844 gen_op_fcmpes(rd & 3);
2845 break;
2846 case 0x56: /* fcmped, V9 %fcc */
2847 gen_op_load_fpr_DT0(DFPREG(rs1));
2848 gen_op_load_fpr_DT1(DFPREG(rs2));
2849 gen_op_fcmped(rd & 3);
2850 break;
2851 case 0x57: /* fcmpeq, V9 %fcc */
2852 #if defined(CONFIG_USER_ONLY)
2853 gen_op_load_fpr_QT0(QFPREG(rs1));
2854 gen_op_load_fpr_QT1(QFPREG(rs2));
2855 gen_op_fcmpeq(rd & 3);
2856 break;
2857 #else/* !defined(CONFIG_USER_ONLY) */
2858 goto nfpu_insn;
2859 #endif
2860 default:
2861 goto illegal_insn;
2863 #if defined(OPTIM)
2864 } else if (xop == 0x2) {
2865 // clr/mov shortcut
2867 rs1 = GET_FIELD(insn, 13, 17);
2868 if (rs1 == 0) {
2869 // or %g0, x, y -> mov T0, x; mov y, T0
2870 if (IS_IMM) { /* immediate */
2871 rs2 = GET_FIELDs(insn, 19, 31);
2872 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2873 } else { /* register */
2874 rs2 = GET_FIELD(insn, 27, 31);
2875 gen_movl_reg_T0(rs2);
2877 } else {
2878 gen_movl_reg_T0(rs1);
2879 if (IS_IMM) { /* immediate */
2880 rs2 = GET_FIELDs(insn, 19, 31);
2881 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2882 } else { /* register */
2883 // or x, %g0, y -> mov T1, x; mov y, T1
2884 rs2 = GET_FIELD(insn, 27, 31);
2885 if (rs2 != 0) {
2886 gen_movl_reg_T1(rs2);
2887 gen_op_or_T1_T0();
2891 gen_movl_T0_reg(rd);
2892 #endif
2893 #ifdef TARGET_SPARC64
2894 } else if (xop == 0x25) { /* sll, V9 sllx */
2895 rs1 = GET_FIELD(insn, 13, 17);
2896 gen_movl_reg_T0(rs1);
2897 if (IS_IMM) { /* immediate */
2898 rs2 = GET_FIELDs(insn, 20, 31);
2899 if (insn & (1 << 12)) {
2900 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2901 } else {
2902 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2903 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2905 } else { /* register */
2906 rs2 = GET_FIELD(insn, 27, 31);
2907 gen_movl_reg_T1(rs2);
2908 if (insn & (1 << 12)) {
2909 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2910 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2911 } else {
2912 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2913 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2914 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2917 gen_movl_T0_reg(rd);
2918 } else if (xop == 0x26) { /* srl, V9 srlx */
2919 rs1 = GET_FIELD(insn, 13, 17);
2920 gen_movl_reg_T0(rs1);
2921 if (IS_IMM) { /* immediate */
2922 rs2 = GET_FIELDs(insn, 20, 31);
2923 if (insn & (1 << 12)) {
2924 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2925 } else {
2926 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2927 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2929 } else { /* register */
2930 rs2 = GET_FIELD(insn, 27, 31);
2931 gen_movl_reg_T1(rs2);
2932 if (insn & (1 << 12)) {
2933 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2934 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2935 } else {
2936 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2937 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2938 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2941 gen_movl_T0_reg(rd);
2942 } else if (xop == 0x27) { /* sra, V9 srax */
2943 rs1 = GET_FIELD(insn, 13, 17);
2944 gen_movl_reg_T0(rs1);
2945 if (IS_IMM) { /* immediate */
2946 rs2 = GET_FIELDs(insn, 20, 31);
2947 if (insn & (1 << 12)) {
2948 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2949 } else {
2950 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2951 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2952 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2954 } else { /* register */
2955 rs2 = GET_FIELD(insn, 27, 31);
2956 gen_movl_reg_T1(rs2);
2957 if (insn & (1 << 12)) {
2958 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2959 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2960 } else {
2961 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2962 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2963 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2966 gen_movl_T0_reg(rd);
2967 #endif
2968 } else if (xop < 0x36) {
2969 rs1 = GET_FIELD(insn, 13, 17);
2970 gen_movl_reg_T0(rs1);
2971 if (IS_IMM) { /* immediate */
2972 rs2 = GET_FIELDs(insn, 19, 31);
2973 gen_movl_simm_T1(rs2);
2974 } else { /* register */
2975 rs2 = GET_FIELD(insn, 27, 31);
2976 gen_movl_reg_T1(rs2);
2978 if (xop < 0x20) {
2979 switch (xop & ~0x10) {
2980 case 0x0:
2981 if (xop & 0x10)
2982 gen_op_add_T1_T0_cc();
2983 else
2984 gen_op_add_T1_T0();
2985 break;
2986 case 0x1:
2987 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2988 if (xop & 0x10)
2989 gen_op_logic_T0_cc();
2990 break;
2991 case 0x2:
2992 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2993 if (xop & 0x10)
2994 gen_op_logic_T0_cc();
2995 break;
2996 case 0x3:
2997 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2998 if (xop & 0x10)
2999 gen_op_logic_T0_cc();
3000 break;
3001 case 0x4:
3002 if (xop & 0x10)
3003 gen_op_sub_T1_T0_cc();
3004 else
3005 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3006 break;
3007 case 0x5:
3008 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3009 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3010 if (xop & 0x10)
3011 gen_op_logic_T0_cc();
3012 break;
3013 case 0x6:
3014 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3015 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3016 if (xop & 0x10)
3017 gen_op_logic_T0_cc();
3018 break;
3019 case 0x7:
3020 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3021 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3022 if (xop & 0x10)
3023 gen_op_logic_T0_cc();
3024 break;
3025 case 0x8:
3026 if (xop & 0x10)
3027 gen_op_addx_T1_T0_cc();
3028 else {
3029 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3030 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3031 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3033 break;
3034 #ifdef TARGET_SPARC64
3035 case 0x9: /* V9 mulx */
3036 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3037 break;
3038 #endif
3039 case 0xa:
3040 gen_op_umul_T1_T0();
3041 if (xop & 0x10)
3042 gen_op_logic_T0_cc();
3043 break;
3044 case 0xb:
3045 gen_op_smul_T1_T0();
3046 if (xop & 0x10)
3047 gen_op_logic_T0_cc();
3048 break;
3049 case 0xc:
3050 if (xop & 0x10)
3051 gen_op_subx_T1_T0_cc();
3052 else {
3053 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3054 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3055 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3057 break;
3058 #ifdef TARGET_SPARC64
3059 case 0xd: /* V9 udivx */
3060 gen_op_udivx_T1_T0();
3061 break;
3062 #endif
3063 case 0xe:
3064 gen_op_udiv_T1_T0();
3065 if (xop & 0x10)
3066 gen_op_div_cc();
3067 break;
3068 case 0xf:
3069 gen_op_sdiv_T1_T0();
3070 if (xop & 0x10)
3071 gen_op_div_cc();
3072 break;
3073 default:
3074 goto illegal_insn;
3076 gen_movl_T0_reg(rd);
3077 } else {
3078 switch (xop) {
3079 case 0x20: /* taddcc */
3080 gen_op_tadd_T1_T0_cc();
3081 gen_movl_T0_reg(rd);
3082 break;
3083 case 0x21: /* tsubcc */
3084 gen_op_tsub_T1_T0_cc();
3085 gen_movl_T0_reg(rd);
3086 break;
3087 case 0x22: /* taddcctv */
3088 save_state(dc);
3089 gen_op_tadd_T1_T0_ccTV();
3090 gen_movl_T0_reg(rd);
3091 break;
3092 case 0x23: /* tsubcctv */
3093 save_state(dc);
3094 gen_op_tsub_T1_T0_ccTV();
3095 gen_movl_T0_reg(rd);
3096 break;
3097 case 0x24: /* mulscc */
3098 gen_op_mulscc_T1_T0();
3099 gen_movl_T0_reg(rd);
3100 break;
3101 #ifndef TARGET_SPARC64
3102 case 0x25: /* sll */
3103 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3104 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3105 gen_movl_T0_reg(rd);
3106 break;
3107 case 0x26: /* srl */
3108 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3109 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3110 gen_movl_T0_reg(rd);
3111 break;
3112 case 0x27: /* sra */
3113 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3114 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3115 gen_movl_T0_reg(rd);
3116 break;
3117 #endif
3118 case 0x30:
3120 switch(rd) {
3121 case 0: /* wry */
3122 gen_op_xor_T1_T0();
3123 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
3124 break;
3125 #ifndef TARGET_SPARC64
3126 case 0x01 ... 0x0f: /* undefined in the
3127 SPARCv8 manual, nop
3128 on the microSPARC
3129 II */
3130 case 0x10 ... 0x1f: /* implementation-dependent
3131 in the SPARCv8
3132 manual, nop on the
3133 microSPARC II */
3134 break;
3135 #else
3136 case 0x2: /* V9 wrccr */
3137 gen_op_xor_T1_T0();
3138 gen_op_wrccr();
3139 break;
3140 case 0x3: /* V9 wrasi */
3141 gen_op_xor_T1_T0();
3142 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
3143 break;
3144 case 0x6: /* V9 wrfprs */
3145 gen_op_xor_T1_T0();
3146 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
3147 save_state(dc);
3148 gen_op_next_insn();
3149 tcg_gen_exit_tb(0);
3150 dc->is_br = 1;
3151 break;
3152 case 0xf: /* V9 sir, nop if user */
3153 #if !defined(CONFIG_USER_ONLY)
3154 if (supervisor(dc))
3155 ; // XXX
3156 #endif
3157 break;
3158 case 0x13: /* Graphics Status */
3159 if (gen_trap_ifnofpu(dc))
3160 goto jmp_insn;
3161 gen_op_xor_T1_T0();
3162 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
3163 break;
3164 case 0x17: /* Tick compare */
3165 #if !defined(CONFIG_USER_ONLY)
3166 if (!supervisor(dc))
3167 goto illegal_insn;
3168 #endif
3170 TCGv r_tickptr;
3172 gen_op_xor_T1_T0();
3173 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3174 tick_cmpr));
3175 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3176 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3177 offsetof(CPUState, tick));
3178 tcg_gen_helper_0_2(helper_tick_set_limit,
3179 r_tickptr, cpu_T[0]);
3181 break;
3182 case 0x18: /* System tick */
3183 #if !defined(CONFIG_USER_ONLY)
3184 if (!supervisor(dc))
3185 goto illegal_insn;
3186 #endif
3188 TCGv r_tickptr;
3190 gen_op_xor_T1_T0();
3191 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3192 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3193 offsetof(CPUState, stick));
3194 tcg_gen_helper_0_2(helper_tick_set_count,
3195 r_tickptr, cpu_T[0]);
3197 break;
3198 case 0x19: /* System tick compare */
3199 #if !defined(CONFIG_USER_ONLY)
3200 if (!supervisor(dc))
3201 goto illegal_insn;
3202 #endif
3204 TCGv r_tickptr;
3206 gen_op_xor_T1_T0();
3207 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3208 stick_cmpr));
3209 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3210 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3211 offsetof(CPUState, stick));
3212 tcg_gen_helper_0_2(helper_tick_set_limit,
3213 r_tickptr, cpu_T[0]);
3215 break;
3217 case 0x10: /* Performance Control */
3218 case 0x11: /* Performance Instrumentation Counter */
3219 case 0x12: /* Dispatch Control */
3220 case 0x14: /* Softint set */
3221 case 0x15: /* Softint clear */
3222 case 0x16: /* Softint write */
3223 #endif
3224 default:
3225 goto illegal_insn;
3228 break;
3229 #if !defined(CONFIG_USER_ONLY)
3230 case 0x31: /* wrpsr, V9 saved, restored */
3232 if (!supervisor(dc))
3233 goto priv_insn;
3234 #ifdef TARGET_SPARC64
3235 switch (rd) {
3236 case 0:
3237 gen_op_saved();
3238 break;
3239 case 1:
3240 gen_op_restored();
3241 break;
3242 case 2: /* UA2005 allclean */
3243 case 3: /* UA2005 otherw */
3244 case 4: /* UA2005 normalw */
3245 case 5: /* UA2005 invalw */
3246 // XXX
3247 default:
3248 goto illegal_insn;
3250 #else
3251 gen_op_xor_T1_T0();
3252 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
3253 save_state(dc);
3254 gen_op_next_insn();
3255 tcg_gen_exit_tb(0);
3256 dc->is_br = 1;
3257 #endif
3259 break;
3260 case 0x32: /* wrwim, V9 wrpr */
3262 if (!supervisor(dc))
3263 goto priv_insn;
3264 gen_op_xor_T1_T0();
3265 #ifdef TARGET_SPARC64
3266 switch (rd) {
3267 case 0: // tpc
3269 TCGv r_tsptr;
3271 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3272 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3273 offsetof(CPUState, tsptr));
3274 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3275 offsetof(trap_state, tpc));
3277 break;
3278 case 1: // tnpc
3280 TCGv r_tsptr;
3282 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3283 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3284 offsetof(CPUState, tsptr));
3285 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3286 offsetof(trap_state, tnpc));
3288 break;
3289 case 2: // tstate
3291 TCGv r_tsptr;
3293 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3294 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3295 offsetof(CPUState, tsptr));
3296 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3297 offsetof(trap_state, tstate));
3299 break;
3300 case 3: // tt
3302 TCGv r_tsptr;
3304 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3305 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3306 offsetof(CPUState, tsptr));
3307 tcg_gen_st_i32(cpu_T[0], r_tsptr,
3308 offsetof(trap_state, tt));
3310 break;
3311 case 4: // tick
3313 TCGv r_tickptr;
3315 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3316 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3317 offsetof(CPUState, tick));
3318 tcg_gen_helper_0_2(helper_tick_set_count,
3319 r_tickptr, cpu_T[0]);
3321 break;
3322 case 5: // tba
3323 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3324 break;
3325 case 6: // pstate
3326 save_state(dc);
3327 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
3328 gen_op_next_insn();
3329 tcg_gen_exit_tb(0);
3330 dc->is_br = 1;
3331 break;
3332 case 7: // tl
3333 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
3334 break;
3335 case 8: // pil
3336 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
3337 break;
3338 case 9: // cwp
3339 gen_op_wrcwp();
3340 break;
3341 case 10: // cansave
3342 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
3343 break;
3344 case 11: // canrestore
3345 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
3346 break;
3347 case 12: // cleanwin
3348 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
3349 break;
3350 case 13: // otherwin
3351 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
3352 break;
3353 case 14: // wstate
3354 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
3355 break;
3356 case 16: // UA2005 gl
3357 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
3358 break;
3359 case 26: // UA2005 strand status
3360 if (!hypervisor(dc))
3361 goto priv_insn;
3362 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
3363 break;
3364 default:
3365 goto illegal_insn;
3367 #else
3368 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
3369 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
3370 #endif
3372 break;
3373 case 0x33: /* wrtbr, UA2005 wrhpr */
3375 #ifndef TARGET_SPARC64
3376 if (!supervisor(dc))
3377 goto priv_insn;
3378 gen_op_xor_T1_T0();
3379 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3380 #else
3381 if (!hypervisor(dc))
3382 goto priv_insn;
3383 gen_op_xor_T1_T0();
3384 switch (rd) {
3385 case 0: // hpstate
3386 // XXX gen_op_wrhpstate();
3387 save_state(dc);
3388 gen_op_next_insn();
3389 tcg_gen_exit_tb(0);
3390 dc->is_br = 1;
3391 break;
3392 case 1: // htstate
3393 // XXX gen_op_wrhtstate();
3394 break;
3395 case 3: // hintp
3396 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
3397 break;
3398 case 5: // htba
3399 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
3400 break;
3401 case 31: // hstick_cmpr
3403 TCGv r_tickptr;
3405 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3406 hstick_cmpr));
3407 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3408 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3409 offsetof(CPUState, hstick));
3410 tcg_gen_helper_0_2(helper_tick_set_limit,
3411 r_tickptr, cpu_T[0]);
3413 break;
3414 case 6: // hver readonly
3415 default:
3416 goto illegal_insn;
3418 #endif
3420 break;
3421 #endif
3422 #ifdef TARGET_SPARC64
3423 case 0x2c: /* V9 movcc */
3425 int cc = GET_FIELD_SP(insn, 11, 12);
3426 int cond = GET_FIELD_SP(insn, 14, 17);
3427 TCGv r_zero;
3428 int l1;
3430 flush_T2(dc);
3431 if (insn & (1 << 18)) {
3432 if (cc == 0)
3433 gen_cond(cpu_T[2], 0, cond);
3434 else if (cc == 2)
3435 gen_cond(cpu_T[2], 1, cond);
3436 else
3437 goto illegal_insn;
3438 } else {
3439 gen_fcond(cpu_T[2], cc, cond);
3442 l1 = gen_new_label();
3444 r_zero = tcg_temp_new(TCG_TYPE_TL);
3445 tcg_gen_movi_tl(r_zero, 0);
3446 tcg_gen_brcond_tl(TCG_COND_EQ, cpu_T[2], r_zero, l1);
3447 if (IS_IMM) { /* immediate */
3448 rs2 = GET_FIELD_SPs(insn, 0, 10);
3449 gen_movl_simm_T1(rs2);
3450 } else {
3451 rs2 = GET_FIELD_SP(insn, 0, 4);
3452 gen_movl_reg_T1(rs2);
3454 gen_movl_T1_reg(rd);
3455 gen_set_label(l1);
3456 break;
3458 case 0x2d: /* V9 sdivx */
3459 gen_op_sdivx_T1_T0();
3460 gen_movl_T0_reg(rd);
3461 break;
3462 case 0x2e: /* V9 popc */
3464 if (IS_IMM) { /* immediate */
3465 rs2 = GET_FIELD_SPs(insn, 0, 12);
3466 gen_movl_simm_T1(rs2);
3467 // XXX optimize: popc(constant)
3469 else {
3470 rs2 = GET_FIELD_SP(insn, 0, 4);
3471 gen_movl_reg_T1(rs2);
3473 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
3474 cpu_T[1]);
3475 gen_movl_T0_reg(rd);
3477 case 0x2f: /* V9 movr */
3479 int cond = GET_FIELD_SP(insn, 10, 12);
3480 TCGv r_zero;
3481 int l1;
3483 rs1 = GET_FIELD(insn, 13, 17);
3484 gen_movl_reg_T0(rs1);
3486 l1 = gen_new_label();
3488 r_zero = tcg_temp_new(TCG_TYPE_TL);
3489 tcg_gen_movi_tl(r_zero, 0);
3490 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
3491 if (IS_IMM) { /* immediate */
3492 rs2 = GET_FIELD_SPs(insn, 0, 9);
3493 gen_movl_simm_T1(rs2);
3494 } else {
3495 rs2 = GET_FIELD_SP(insn, 0, 4);
3496 gen_movl_reg_T1(rs2);
3498 gen_movl_T1_reg(rd);
3499 gen_set_label(l1);
3500 break;
3502 #endif
3503 default:
3504 goto illegal_insn;
3507 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3508 #ifdef TARGET_SPARC64
3509 int opf = GET_FIELD_SP(insn, 5, 13);
3510 rs1 = GET_FIELD(insn, 13, 17);
3511 rs2 = GET_FIELD(insn, 27, 31);
3512 if (gen_trap_ifnofpu(dc))
3513 goto jmp_insn;
3515 switch (opf) {
3516 case 0x000: /* VIS I edge8cc */
3517 case 0x001: /* VIS II edge8n */
3518 case 0x002: /* VIS I edge8lcc */
3519 case 0x003: /* VIS II edge8ln */
3520 case 0x004: /* VIS I edge16cc */
3521 case 0x005: /* VIS II edge16n */
3522 case 0x006: /* VIS I edge16lcc */
3523 case 0x007: /* VIS II edge16ln */
3524 case 0x008: /* VIS I edge32cc */
3525 case 0x009: /* VIS II edge32n */
3526 case 0x00a: /* VIS I edge32lcc */
3527 case 0x00b: /* VIS II edge32ln */
3528 // XXX
3529 goto illegal_insn;
3530 case 0x010: /* VIS I array8 */
3531 gen_movl_reg_T0(rs1);
3532 gen_movl_reg_T1(rs2);
3533 gen_op_array8();
3534 gen_movl_T0_reg(rd);
3535 break;
3536 case 0x012: /* VIS I array16 */
3537 gen_movl_reg_T0(rs1);
3538 gen_movl_reg_T1(rs2);
3539 gen_op_array16();
3540 gen_movl_T0_reg(rd);
3541 break;
3542 case 0x014: /* VIS I array32 */
3543 gen_movl_reg_T0(rs1);
3544 gen_movl_reg_T1(rs2);
3545 gen_op_array32();
3546 gen_movl_T0_reg(rd);
3547 break;
3548 case 0x018: /* VIS I alignaddr */
3549 gen_movl_reg_T0(rs1);
3550 gen_movl_reg_T1(rs2);
3551 gen_op_alignaddr();
3552 gen_movl_T0_reg(rd);
3553 break;
3554 case 0x019: /* VIS II bmask */
3555 case 0x01a: /* VIS I alignaddrl */
3556 // XXX
3557 goto illegal_insn;
3558 case 0x020: /* VIS I fcmple16 */
3559 gen_op_load_fpr_DT0(DFPREG(rs1));
3560 gen_op_load_fpr_DT1(DFPREG(rs2));
3561 gen_op_fcmple16();
3562 gen_op_store_DT0_fpr(DFPREG(rd));
3563 break;
3564 case 0x022: /* VIS I fcmpne16 */
3565 gen_op_load_fpr_DT0(DFPREG(rs1));
3566 gen_op_load_fpr_DT1(DFPREG(rs2));
3567 gen_op_fcmpne16();
3568 gen_op_store_DT0_fpr(DFPREG(rd));
3569 break;
3570 case 0x024: /* VIS I fcmple32 */
3571 gen_op_load_fpr_DT0(DFPREG(rs1));
3572 gen_op_load_fpr_DT1(DFPREG(rs2));
3573 gen_op_fcmple32();
3574 gen_op_store_DT0_fpr(DFPREG(rd));
3575 break;
3576 case 0x026: /* VIS I fcmpne32 */
3577 gen_op_load_fpr_DT0(DFPREG(rs1));
3578 gen_op_load_fpr_DT1(DFPREG(rs2));
3579 gen_op_fcmpne32();
3580 gen_op_store_DT0_fpr(DFPREG(rd));
3581 break;
3582 case 0x028: /* VIS I fcmpgt16 */
3583 gen_op_load_fpr_DT0(DFPREG(rs1));
3584 gen_op_load_fpr_DT1(DFPREG(rs2));
3585 gen_op_fcmpgt16();
3586 gen_op_store_DT0_fpr(DFPREG(rd));
3587 break;
3588 case 0x02a: /* VIS I fcmpeq16 */
3589 gen_op_load_fpr_DT0(DFPREG(rs1));
3590 gen_op_load_fpr_DT1(DFPREG(rs2));
3591 gen_op_fcmpeq16();
3592 gen_op_store_DT0_fpr(DFPREG(rd));
3593 break;
3594 case 0x02c: /* VIS I fcmpgt32 */
3595 gen_op_load_fpr_DT0(DFPREG(rs1));
3596 gen_op_load_fpr_DT1(DFPREG(rs2));
3597 gen_op_fcmpgt32();
3598 gen_op_store_DT0_fpr(DFPREG(rd));
3599 break;
3600 case 0x02e: /* VIS I fcmpeq32 */
3601 gen_op_load_fpr_DT0(DFPREG(rs1));
3602 gen_op_load_fpr_DT1(DFPREG(rs2));
3603 gen_op_fcmpeq32();
3604 gen_op_store_DT0_fpr(DFPREG(rd));
3605 break;
3606 case 0x031: /* VIS I fmul8x16 */
3607 gen_op_load_fpr_DT0(DFPREG(rs1));
3608 gen_op_load_fpr_DT1(DFPREG(rs2));
3609 gen_op_fmul8x16();
3610 gen_op_store_DT0_fpr(DFPREG(rd));
3611 break;
3612 case 0x033: /* VIS I fmul8x16au */
3613 gen_op_load_fpr_DT0(DFPREG(rs1));
3614 gen_op_load_fpr_DT1(DFPREG(rs2));
3615 gen_op_fmul8x16au();
3616 gen_op_store_DT0_fpr(DFPREG(rd));
3617 break;
3618 case 0x035: /* VIS I fmul8x16al */
3619 gen_op_load_fpr_DT0(DFPREG(rs1));
3620 gen_op_load_fpr_DT1(DFPREG(rs2));
3621 gen_op_fmul8x16al();
3622 gen_op_store_DT0_fpr(DFPREG(rd));
3623 break;
3624 case 0x036: /* VIS I fmul8sux16 */
3625 gen_op_load_fpr_DT0(DFPREG(rs1));
3626 gen_op_load_fpr_DT1(DFPREG(rs2));
3627 gen_op_fmul8sux16();
3628 gen_op_store_DT0_fpr(DFPREG(rd));
3629 break;
3630 case 0x037: /* VIS I fmul8ulx16 */
3631 gen_op_load_fpr_DT0(DFPREG(rs1));
3632 gen_op_load_fpr_DT1(DFPREG(rs2));
3633 gen_op_fmul8ulx16();
3634 gen_op_store_DT0_fpr(DFPREG(rd));
3635 break;
3636 case 0x038: /* VIS I fmuld8sux16 */
3637 gen_op_load_fpr_DT0(DFPREG(rs1));
3638 gen_op_load_fpr_DT1(DFPREG(rs2));
3639 gen_op_fmuld8sux16();
3640 gen_op_store_DT0_fpr(DFPREG(rd));
3641 break;
3642 case 0x039: /* VIS I fmuld8ulx16 */
3643 gen_op_load_fpr_DT0(DFPREG(rs1));
3644 gen_op_load_fpr_DT1(DFPREG(rs2));
3645 gen_op_fmuld8ulx16();
3646 gen_op_store_DT0_fpr(DFPREG(rd));
3647 break;
3648 case 0x03a: /* VIS I fpack32 */
3649 case 0x03b: /* VIS I fpack16 */
3650 case 0x03d: /* VIS I fpackfix */
3651 case 0x03e: /* VIS I pdist */
3652 // XXX
3653 goto illegal_insn;
3654 case 0x048: /* VIS I faligndata */
3655 gen_op_load_fpr_DT0(DFPREG(rs1));
3656 gen_op_load_fpr_DT1(DFPREG(rs2));
3657 gen_op_faligndata();
3658 gen_op_store_DT0_fpr(DFPREG(rd));
3659 break;
3660 case 0x04b: /* VIS I fpmerge */
3661 gen_op_load_fpr_DT0(DFPREG(rs1));
3662 gen_op_load_fpr_DT1(DFPREG(rs2));
3663 gen_op_fpmerge();
3664 gen_op_store_DT0_fpr(DFPREG(rd));
3665 break;
3666 case 0x04c: /* VIS II bshuffle */
3667 // XXX
3668 goto illegal_insn;
3669 case 0x04d: /* VIS I fexpand */
3670 gen_op_load_fpr_DT0(DFPREG(rs1));
3671 gen_op_load_fpr_DT1(DFPREG(rs2));
3672 gen_op_fexpand();
3673 gen_op_store_DT0_fpr(DFPREG(rd));
3674 break;
3675 case 0x050: /* VIS I fpadd16 */
3676 gen_op_load_fpr_DT0(DFPREG(rs1));
3677 gen_op_load_fpr_DT1(DFPREG(rs2));
3678 gen_op_fpadd16();
3679 gen_op_store_DT0_fpr(DFPREG(rd));
3680 break;
3681 case 0x051: /* VIS I fpadd16s */
3682 gen_op_load_fpr_FT0(rs1);
3683 gen_op_load_fpr_FT1(rs2);
3684 gen_op_fpadd16s();
3685 gen_op_store_FT0_fpr(rd);
3686 break;
3687 case 0x052: /* VIS I fpadd32 */
3688 gen_op_load_fpr_DT0(DFPREG(rs1));
3689 gen_op_load_fpr_DT1(DFPREG(rs2));
3690 gen_op_fpadd32();
3691 gen_op_store_DT0_fpr(DFPREG(rd));
3692 break;
3693 case 0x053: /* VIS I fpadd32s */
3694 gen_op_load_fpr_FT0(rs1);
3695 gen_op_load_fpr_FT1(rs2);
3696 gen_op_fpadd32s();
3697 gen_op_store_FT0_fpr(rd);
3698 break;
3699 case 0x054: /* VIS I fpsub16 */
3700 gen_op_load_fpr_DT0(DFPREG(rs1));
3701 gen_op_load_fpr_DT1(DFPREG(rs2));
3702 gen_op_fpsub16();
3703 gen_op_store_DT0_fpr(DFPREG(rd));
3704 break;
3705 case 0x055: /* VIS I fpsub16s */
3706 gen_op_load_fpr_FT0(rs1);
3707 gen_op_load_fpr_FT1(rs2);
3708 gen_op_fpsub16s();
3709 gen_op_store_FT0_fpr(rd);
3710 break;
3711 case 0x056: /* VIS I fpsub32 */
3712 gen_op_load_fpr_DT0(DFPREG(rs1));
3713 gen_op_load_fpr_DT1(DFPREG(rs2));
3714 gen_op_fpadd32();
3715 gen_op_store_DT0_fpr(DFPREG(rd));
3716 break;
3717 case 0x057: /* VIS I fpsub32s */
3718 gen_op_load_fpr_FT0(rs1);
3719 gen_op_load_fpr_FT1(rs2);
3720 gen_op_fpsub32s();
3721 gen_op_store_FT0_fpr(rd);
3722 break;
3723 case 0x060: /* VIS I fzero */
3724 gen_op_movl_DT0_0();
3725 gen_op_store_DT0_fpr(DFPREG(rd));
3726 break;
3727 case 0x061: /* VIS I fzeros */
3728 gen_op_movl_FT0_0();
3729 gen_op_store_FT0_fpr(rd);
3730 break;
3731 case 0x062: /* VIS I fnor */
3732 gen_op_load_fpr_DT0(DFPREG(rs1));
3733 gen_op_load_fpr_DT1(DFPREG(rs2));
3734 gen_op_fnor();
3735 gen_op_store_DT0_fpr(DFPREG(rd));
3736 break;
3737 case 0x063: /* VIS I fnors */
3738 gen_op_load_fpr_FT0(rs1);
3739 gen_op_load_fpr_FT1(rs2);
3740 gen_op_fnors();
3741 gen_op_store_FT0_fpr(rd);
3742 break;
3743 case 0x064: /* VIS I fandnot2 */
3744 gen_op_load_fpr_DT1(DFPREG(rs1));
3745 gen_op_load_fpr_DT0(DFPREG(rs2));
3746 gen_op_fandnot();
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x065: /* VIS I fandnot2s */
3750 gen_op_load_fpr_FT1(rs1);
3751 gen_op_load_fpr_FT0(rs2);
3752 gen_op_fandnots();
3753 gen_op_store_FT0_fpr(rd);
3754 break;
3755 case 0x066: /* VIS I fnot2 */
3756 gen_op_load_fpr_DT1(DFPREG(rs2));
3757 gen_op_fnot();
3758 gen_op_store_DT0_fpr(DFPREG(rd));
3759 break;
3760 case 0x067: /* VIS I fnot2s */
3761 gen_op_load_fpr_FT1(rs2);
3762 gen_op_fnot();
3763 gen_op_store_FT0_fpr(rd);
3764 break;
3765 case 0x068: /* VIS I fandnot1 */
3766 gen_op_load_fpr_DT0(DFPREG(rs1));
3767 gen_op_load_fpr_DT1(DFPREG(rs2));
3768 gen_op_fandnot();
3769 gen_op_store_DT0_fpr(DFPREG(rd));
3770 break;
3771 case 0x069: /* VIS I fandnot1s */
3772 gen_op_load_fpr_FT0(rs1);
3773 gen_op_load_fpr_FT1(rs2);
3774 gen_op_fandnots();
3775 gen_op_store_FT0_fpr(rd);
3776 break;
3777 case 0x06a: /* VIS I fnot1 */
3778 gen_op_load_fpr_DT1(DFPREG(rs1));
3779 gen_op_fnot();
3780 gen_op_store_DT0_fpr(DFPREG(rd));
3781 break;
3782 case 0x06b: /* VIS I fnot1s */
3783 gen_op_load_fpr_FT1(rs1);
3784 gen_op_fnot();
3785 gen_op_store_FT0_fpr(rd);
3786 break;
3787 case 0x06c: /* VIS I fxor */
3788 gen_op_load_fpr_DT0(DFPREG(rs1));
3789 gen_op_load_fpr_DT1(DFPREG(rs2));
3790 gen_op_fxor();
3791 gen_op_store_DT0_fpr(DFPREG(rd));
3792 break;
3793 case 0x06d: /* VIS I fxors */
3794 gen_op_load_fpr_FT0(rs1);
3795 gen_op_load_fpr_FT1(rs2);
3796 gen_op_fxors();
3797 gen_op_store_FT0_fpr(rd);
3798 break;
3799 case 0x06e: /* VIS I fnand */
3800 gen_op_load_fpr_DT0(DFPREG(rs1));
3801 gen_op_load_fpr_DT1(DFPREG(rs2));
3802 gen_op_fnand();
3803 gen_op_store_DT0_fpr(DFPREG(rd));
3804 break;
3805 case 0x06f: /* VIS I fnands */
3806 gen_op_load_fpr_FT0(rs1);
3807 gen_op_load_fpr_FT1(rs2);
3808 gen_op_fnands();
3809 gen_op_store_FT0_fpr(rd);
3810 break;
3811 case 0x070: /* VIS I fand */
3812 gen_op_load_fpr_DT0(DFPREG(rs1));
3813 gen_op_load_fpr_DT1(DFPREG(rs2));
3814 gen_op_fand();
3815 gen_op_store_DT0_fpr(DFPREG(rd));
3816 break;
3817 case 0x071: /* VIS I fands */
3818 gen_op_load_fpr_FT0(rs1);
3819 gen_op_load_fpr_FT1(rs2);
3820 gen_op_fands();
3821 gen_op_store_FT0_fpr(rd);
3822 break;
3823 case 0x072: /* VIS I fxnor */
3824 gen_op_load_fpr_DT0(DFPREG(rs1));
3825 gen_op_load_fpr_DT1(DFPREG(rs2));
3826 gen_op_fxnor();
3827 gen_op_store_DT0_fpr(DFPREG(rd));
3828 break;
3829 case 0x073: /* VIS I fxnors */
3830 gen_op_load_fpr_FT0(rs1);
3831 gen_op_load_fpr_FT1(rs2);
3832 gen_op_fxnors();
3833 gen_op_store_FT0_fpr(rd);
3834 break;
3835 case 0x074: /* VIS I fsrc1 */
3836 gen_op_load_fpr_DT0(DFPREG(rs1));
3837 gen_op_store_DT0_fpr(DFPREG(rd));
3838 break;
3839 case 0x075: /* VIS I fsrc1s */
3840 gen_op_load_fpr_FT0(rs1);
3841 gen_op_store_FT0_fpr(rd);
3842 break;
3843 case 0x076: /* VIS I fornot2 */
3844 gen_op_load_fpr_DT1(DFPREG(rs1));
3845 gen_op_load_fpr_DT0(DFPREG(rs2));
3846 gen_op_fornot();
3847 gen_op_store_DT0_fpr(DFPREG(rd));
3848 break;
3849 case 0x077: /* VIS I fornot2s */
3850 gen_op_load_fpr_FT1(rs1);
3851 gen_op_load_fpr_FT0(rs2);
3852 gen_op_fornots();
3853 gen_op_store_FT0_fpr(rd);
3854 break;
3855 case 0x078: /* VIS I fsrc2 */
3856 gen_op_load_fpr_DT0(DFPREG(rs2));
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3858 break;
3859 case 0x079: /* VIS I fsrc2s */
3860 gen_op_load_fpr_FT0(rs2);
3861 gen_op_store_FT0_fpr(rd);
3862 break;
3863 case 0x07a: /* VIS I fornot1 */
3864 gen_op_load_fpr_DT0(DFPREG(rs1));
3865 gen_op_load_fpr_DT1(DFPREG(rs2));
3866 gen_op_fornot();
3867 gen_op_store_DT0_fpr(DFPREG(rd));
3868 break;
3869 case 0x07b: /* VIS I fornot1s */
3870 gen_op_load_fpr_FT0(rs1);
3871 gen_op_load_fpr_FT1(rs2);
3872 gen_op_fornots();
3873 gen_op_store_FT0_fpr(rd);
3874 break;
3875 case 0x07c: /* VIS I for */
3876 gen_op_load_fpr_DT0(DFPREG(rs1));
3877 gen_op_load_fpr_DT1(DFPREG(rs2));
3878 gen_op_for();
3879 gen_op_store_DT0_fpr(DFPREG(rd));
3880 break;
3881 case 0x07d: /* VIS I fors */
3882 gen_op_load_fpr_FT0(rs1);
3883 gen_op_load_fpr_FT1(rs2);
3884 gen_op_fors();
3885 gen_op_store_FT0_fpr(rd);
3886 break;
3887 case 0x07e: /* VIS I fone */
3888 gen_op_movl_DT0_1();
3889 gen_op_store_DT0_fpr(DFPREG(rd));
3890 break;
3891 case 0x07f: /* VIS I fones */
3892 gen_op_movl_FT0_1();
3893 gen_op_store_FT0_fpr(rd);
3894 break;
3895 case 0x080: /* VIS I shutdown */
3896 case 0x081: /* VIS II siam */
3897 // XXX
3898 goto illegal_insn;
3899 default:
3900 goto illegal_insn;
3902 #else
3903 goto ncp_insn;
3904 #endif
3905 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3906 #ifdef TARGET_SPARC64
3907 goto illegal_insn;
3908 #else
3909 goto ncp_insn;
3910 #endif
3911 #ifdef TARGET_SPARC64
3912 } else if (xop == 0x39) { /* V9 return */
3913 rs1 = GET_FIELD(insn, 13, 17);
3914 save_state(dc);
3915 gen_movl_reg_T0(rs1);
3916 if (IS_IMM) { /* immediate */
3917 rs2 = GET_FIELDs(insn, 19, 31);
3918 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3919 } else { /* register */
3920 rs2 = GET_FIELD(insn, 27, 31);
3921 #if defined(OPTIM)
3922 if (rs2) {
3923 #endif
3924 gen_movl_reg_T1(rs2);
3925 gen_op_add_T1_T0();
3926 #if defined(OPTIM)
3928 #endif
3930 gen_op_restore();
3931 gen_mov_pc_npc(dc);
3932 gen_op_check_align_T0_3();
3933 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3934 dc->npc = DYNAMIC_PC;
3935 goto jmp_insn;
3936 #endif
3937 } else {
3938 rs1 = GET_FIELD(insn, 13, 17);
3939 gen_movl_reg_T0(rs1);
3940 if (IS_IMM) { /* immediate */
3941 rs2 = GET_FIELDs(insn, 19, 31);
3942 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3943 } else { /* register */
3944 rs2 = GET_FIELD(insn, 27, 31);
3945 #if defined(OPTIM)
3946 if (rs2) {
3947 #endif
3948 gen_movl_reg_T1(rs2);
3949 gen_op_add_T1_T0();
3950 #if defined(OPTIM)
3952 #endif
3954 switch (xop) {
3955 case 0x38: /* jmpl */
3957 if (rd != 0) {
3958 tcg_gen_movi_tl(cpu_T[1], dc->pc);
3959 gen_movl_T1_reg(rd);
3961 gen_mov_pc_npc(dc);
3962 gen_op_check_align_T0_3();
3963 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3964 dc->npc = DYNAMIC_PC;
3966 goto jmp_insn;
3967 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3968 case 0x39: /* rett, V9 return */
3970 if (!supervisor(dc))
3971 goto priv_insn;
3972 gen_mov_pc_npc(dc);
3973 gen_op_check_align_T0_3();
3974 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3975 dc->npc = DYNAMIC_PC;
3976 tcg_gen_helper_0_0(helper_rett);
3978 goto jmp_insn;
3979 #endif
3980 case 0x3b: /* flush */
3981 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
3982 break;
3983 case 0x3c: /* save */
3984 save_state(dc);
3985 gen_op_save();
3986 gen_movl_T0_reg(rd);
3987 break;
3988 case 0x3d: /* restore */
3989 save_state(dc);
3990 gen_op_restore();
3991 gen_movl_T0_reg(rd);
3992 break;
3993 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3994 case 0x3e: /* V9 done/retry */
3996 switch (rd) {
3997 case 0:
3998 if (!supervisor(dc))
3999 goto priv_insn;
4000 dc->npc = DYNAMIC_PC;
4001 dc->pc = DYNAMIC_PC;
4002 tcg_gen_helper_0_0(helper_done);
4003 goto jmp_insn;
4004 case 1:
4005 if (!supervisor(dc))
4006 goto priv_insn;
4007 dc->npc = DYNAMIC_PC;
4008 dc->pc = DYNAMIC_PC;
4009 tcg_gen_helper_0_0(helper_retry);
4010 goto jmp_insn;
4011 default:
4012 goto illegal_insn;
4015 break;
4016 #endif
4017 default:
4018 goto illegal_insn;
4021 break;
4023 break;
4024 case 3: /* load/store instructions */
4026 unsigned int xop = GET_FIELD(insn, 7, 12);
4027 rs1 = GET_FIELD(insn, 13, 17);
4028 save_state(dc);
4029 gen_movl_reg_T0(rs1);
4030 if (xop == 0x3c || xop == 0x3e)
4032 rs2 = GET_FIELD(insn, 27, 31);
4033 gen_movl_reg_T1(rs2);
4035 else if (IS_IMM) { /* immediate */
4036 rs2 = GET_FIELDs(insn, 19, 31);
4037 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
4038 } else { /* register */
4039 rs2 = GET_FIELD(insn, 27, 31);
4040 #if defined(OPTIM)
4041 if (rs2 != 0) {
4042 #endif
4043 gen_movl_reg_T1(rs2);
4044 gen_op_add_T1_T0();
4045 #if defined(OPTIM)
4047 #endif
4049 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4050 (xop > 0x17 && xop <= 0x1d ) ||
4051 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4052 switch (xop) {
4053 case 0x0: /* load unsigned word */
4054 gen_op_check_align_T0_3();
4055 ABI32_MASK(cpu_T[0]);
4056 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
4057 break;
4058 case 0x1: /* load unsigned byte */
4059 ABI32_MASK(cpu_T[0]);
4060 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
4061 break;
4062 case 0x2: /* load unsigned halfword */
4063 gen_op_check_align_T0_1();
4064 ABI32_MASK(cpu_T[0]);
4065 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
4066 break;
4067 case 0x3: /* load double word */
4068 if (rd & 1)
4069 goto illegal_insn;
4070 else {
4071 TCGv r_dword;
4073 r_dword = tcg_temp_new(TCG_TYPE_I64);
4074 gen_op_check_align_T0_7();
4075 ABI32_MASK(cpu_T[0]);
4076 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
4077 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
4078 gen_movl_T0_reg(rd + 1);
4079 tcg_gen_shri_i64(r_dword, r_dword, 32);
4080 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
4082 break;
4083 case 0x9: /* load signed byte */
4084 ABI32_MASK(cpu_T[0]);
4085 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4086 break;
4087 case 0xa: /* load signed halfword */
4088 gen_op_check_align_T0_1();
4089 ABI32_MASK(cpu_T[0]);
4090 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
4091 break;
4092 case 0xd: /* ldstub -- XXX: should be atomically */
4093 tcg_gen_movi_i32(cpu_tmp0, 0xff);
4094 ABI32_MASK(cpu_T[0]);
4095 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4096 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
4097 break;
4098 case 0x0f: /* swap register with memory. Also atomically */
4099 gen_op_check_align_T0_3();
4100 gen_movl_reg_T1(rd);
4101 ABI32_MASK(cpu_T[0]);
4102 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
4103 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4104 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
4105 break;
4106 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4107 case 0x10: /* load word alternate */
4108 #ifndef TARGET_SPARC64
4109 if (IS_IMM)
4110 goto illegal_insn;
4111 if (!supervisor(dc))
4112 goto priv_insn;
4113 #endif
4114 gen_op_check_align_T0_3();
4115 gen_ld_asi(insn, 4, 0);
4116 break;
4117 case 0x11: /* load unsigned byte alternate */
4118 #ifndef TARGET_SPARC64
4119 if (IS_IMM)
4120 goto illegal_insn;
4121 if (!supervisor(dc))
4122 goto priv_insn;
4123 #endif
4124 gen_ld_asi(insn, 1, 0);
4125 break;
4126 case 0x12: /* load unsigned halfword alternate */
4127 #ifndef TARGET_SPARC64
4128 if (IS_IMM)
4129 goto illegal_insn;
4130 if (!supervisor(dc))
4131 goto priv_insn;
4132 #endif
4133 gen_op_check_align_T0_1();
4134 gen_ld_asi(insn, 2, 0);
4135 break;
4136 case 0x13: /* load double word alternate */
4137 #ifndef TARGET_SPARC64
4138 if (IS_IMM)
4139 goto illegal_insn;
4140 if (!supervisor(dc))
4141 goto priv_insn;
4142 #endif
4143 if (rd & 1)
4144 goto illegal_insn;
4145 gen_op_check_align_T0_7();
4146 gen_ldda_asi(insn);
4147 gen_movl_T0_reg(rd + 1);
4148 break;
4149 case 0x19: /* load signed byte alternate */
4150 #ifndef TARGET_SPARC64
4151 if (IS_IMM)
4152 goto illegal_insn;
4153 if (!supervisor(dc))
4154 goto priv_insn;
4155 #endif
4156 gen_ld_asi(insn, 1, 1);
4157 break;
4158 case 0x1a: /* load signed halfword alternate */
4159 #ifndef TARGET_SPARC64
4160 if (IS_IMM)
4161 goto illegal_insn;
4162 if (!supervisor(dc))
4163 goto priv_insn;
4164 #endif
4165 gen_op_check_align_T0_1();
4166 gen_ld_asi(insn, 2, 1);
4167 break;
4168 case 0x1d: /* ldstuba -- XXX: should be atomically */
4169 #ifndef TARGET_SPARC64
4170 if (IS_IMM)
4171 goto illegal_insn;
4172 if (!supervisor(dc))
4173 goto priv_insn;
4174 #endif
4175 gen_ldstub_asi(insn);
4176 break;
4177 case 0x1f: /* swap reg with alt. memory. Also atomically */
4178 #ifndef TARGET_SPARC64
4179 if (IS_IMM)
4180 goto illegal_insn;
4181 if (!supervisor(dc))
4182 goto priv_insn;
4183 #endif
4184 gen_op_check_align_T0_3();
4185 gen_movl_reg_T1(rd);
4186 gen_swap_asi(insn);
4187 break;
4189 #ifndef TARGET_SPARC64
4190 case 0x30: /* ldc */
4191 case 0x31: /* ldcsr */
4192 case 0x33: /* lddc */
4193 goto ncp_insn;
4194 #endif
4195 #endif
4196 #ifdef TARGET_SPARC64
4197 case 0x08: /* V9 ldsw */
4198 gen_op_check_align_T0_3();
4199 ABI32_MASK(cpu_T[0]);
4200 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
4201 break;
4202 case 0x0b: /* V9 ldx */
4203 gen_op_check_align_T0_7();
4204 ABI32_MASK(cpu_T[0]);
4205 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
4206 break;
4207 case 0x18: /* V9 ldswa */
4208 gen_op_check_align_T0_3();
4209 gen_ld_asi(insn, 4, 1);
4210 break;
4211 case 0x1b: /* V9 ldxa */
4212 gen_op_check_align_T0_7();
4213 gen_ld_asi(insn, 8, 0);
4214 break;
4215 case 0x2d: /* V9 prefetch, no effect */
4216 goto skip_move;
4217 case 0x30: /* V9 ldfa */
4218 gen_op_check_align_T0_3();
4219 gen_ldf_asi(insn, 4, rd);
4220 goto skip_move;
4221 case 0x33: /* V9 lddfa */
4222 gen_op_check_align_T0_3();
4223 gen_ldf_asi(insn, 8, DFPREG(rd));
4224 goto skip_move;
4225 case 0x3d: /* V9 prefetcha, no effect */
4226 goto skip_move;
4227 case 0x32: /* V9 ldqfa */
4228 #if defined(CONFIG_USER_ONLY)
4229 gen_op_check_align_T0_3();
4230 gen_ldf_asi(insn, 16, QFPREG(rd));
4231 goto skip_move;
4232 #else
4233 goto nfpu_insn;
4234 #endif
4235 #endif
4236 default:
4237 goto illegal_insn;
4239 gen_movl_T1_reg(rd);
4240 #ifdef TARGET_SPARC64
4241 skip_move: ;
4242 #endif
4243 } else if (xop >= 0x20 && xop < 0x24) {
4244 if (gen_trap_ifnofpu(dc))
4245 goto jmp_insn;
4246 switch (xop) {
4247 case 0x20: /* load fpreg */
4248 gen_op_check_align_T0_3();
4249 gen_op_ldst(ldf);
4250 gen_op_store_FT0_fpr(rd);
4251 break;
4252 case 0x21: /* load fsr */
4253 gen_op_check_align_T0_3();
4254 gen_op_ldst(ldf);
4255 gen_op_ldfsr();
4256 tcg_gen_helper_0_0(helper_ldfsr);
4257 break;
4258 case 0x22: /* load quad fpreg */
4259 #if defined(CONFIG_USER_ONLY)
4260 gen_op_check_align_T0_7();
4261 gen_op_ldst(ldqf);
4262 gen_op_store_QT0_fpr(QFPREG(rd));
4263 break;
4264 #else
4265 goto nfpu_insn;
4266 #endif
4267 case 0x23: /* load double fpreg */
4268 gen_op_check_align_T0_7();
4269 gen_op_ldst(lddf);
4270 gen_op_store_DT0_fpr(DFPREG(rd));
4271 break;
4272 default:
4273 goto illegal_insn;
4275 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4276 xop == 0xe || xop == 0x1e) {
4277 gen_movl_reg_T1(rd);
4278 switch (xop) {
4279 case 0x4: /* store word */
4280 gen_op_check_align_T0_3();
4281 ABI32_MASK(cpu_T[0]);
4282 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4283 break;
4284 case 0x5: /* store byte */
4285 ABI32_MASK(cpu_T[0]);
4286 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
4287 break;
4288 case 0x6: /* store halfword */
4289 gen_op_check_align_T0_1();
4290 ABI32_MASK(cpu_T[0]);
4291 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
4292 break;
4293 case 0x7: /* store double word */
4294 if (rd & 1)
4295 goto illegal_insn;
4296 #ifndef __i386__
4297 else {
4298 TCGv r_dword, r_low;
4300 gen_op_check_align_T0_7();
4301 r_dword = tcg_temp_new(TCG_TYPE_I64);
4302 r_low = tcg_temp_new(TCG_TYPE_I32);
4303 gen_movl_reg_TN(rd + 1, r_low);
4304 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4305 r_low);
4306 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
4308 #else /* __i386__ */
4309 gen_op_check_align_T0_7();
4310 flush_T2(dc);
4311 gen_movl_reg_T2(rd + 1);
4312 gen_op_ldst(std);
4313 #endif /* __i386__ */
4314 break;
4315 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4316 case 0x14: /* store word alternate */
4317 #ifndef TARGET_SPARC64
4318 if (IS_IMM)
4319 goto illegal_insn;
4320 if (!supervisor(dc))
4321 goto priv_insn;
4322 #endif
4323 gen_op_check_align_T0_3();
4324 gen_st_asi(insn, 4);
4325 break;
4326 case 0x15: /* store byte alternate */
4327 #ifndef TARGET_SPARC64
4328 if (IS_IMM)
4329 goto illegal_insn;
4330 if (!supervisor(dc))
4331 goto priv_insn;
4332 #endif
4333 gen_st_asi(insn, 1);
4334 break;
4335 case 0x16: /* store halfword alternate */
4336 #ifndef TARGET_SPARC64
4337 if (IS_IMM)
4338 goto illegal_insn;
4339 if (!supervisor(dc))
4340 goto priv_insn;
4341 #endif
4342 gen_op_check_align_T0_1();
4343 gen_st_asi(insn, 2);
4344 break;
4345 case 0x17: /* store double word alternate */
4346 #ifndef TARGET_SPARC64
4347 if (IS_IMM)
4348 goto illegal_insn;
4349 if (!supervisor(dc))
4350 goto priv_insn;
4351 #endif
4352 if (rd & 1)
4353 goto illegal_insn;
4354 else {
4355 int asi;
4356 TCGv r_dword, r_temp, r_size;
4358 gen_op_check_align_T0_7();
4359 r_dword = tcg_temp_new(TCG_TYPE_I64);
4360 r_temp = tcg_temp_new(TCG_TYPE_I32);
4361 r_size = tcg_temp_new(TCG_TYPE_I32);
4362 gen_movl_reg_TN(rd + 1, r_temp);
4363 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4364 r_temp);
4365 #ifdef TARGET_SPARC64
4366 if (IS_IMM) {
4367 int offset;
4369 offset = GET_FIELD(insn, 25, 31);
4370 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
4371 tcg_gen_ld_i32(r_dword, cpu_env, offsetof(CPUSPARCState, asi));
4372 } else {
4373 #endif
4374 asi = GET_FIELD(insn, 19, 26);
4375 tcg_gen_movi_i32(r_temp, asi);
4376 #ifdef TARGET_SPARC64
4378 #endif
4379 tcg_gen_movi_i32(r_size, 8);
4380 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_temp, r_size);
4382 break;
4383 #endif
4384 #ifdef TARGET_SPARC64
4385 case 0x0e: /* V9 stx */
4386 gen_op_check_align_T0_7();
4387 ABI32_MASK(cpu_T[0]);
4388 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
4389 break;
4390 case 0x1e: /* V9 stxa */
4391 gen_op_check_align_T0_7();
4392 gen_st_asi(insn, 8);
4393 break;
4394 #endif
4395 default:
4396 goto illegal_insn;
4398 } else if (xop > 0x23 && xop < 0x28) {
4399 if (gen_trap_ifnofpu(dc))
4400 goto jmp_insn;
4401 switch (xop) {
4402 case 0x24:
4403 gen_op_check_align_T0_3();
4404 gen_op_load_fpr_FT0(rd);
4405 gen_op_ldst(stf);
4406 break;
4407 case 0x25: /* stfsr, V9 stxfsr */
4408 #ifdef CONFIG_USER_ONLY
4409 gen_op_check_align_T0_3();
4410 #endif
4411 gen_op_stfsr();
4412 gen_op_ldst(stf);
4413 break;
4414 case 0x26:
4415 #ifdef TARGET_SPARC64
4416 #if defined(CONFIG_USER_ONLY)
4417 /* V9 stqf, store quad fpreg */
4418 gen_op_check_align_T0_7();
4419 gen_op_load_fpr_QT0(QFPREG(rd));
4420 gen_op_ldst(stqf);
4421 break;
4422 #else
4423 goto nfpu_insn;
4424 #endif
4425 #else /* !TARGET_SPARC64 */
4426 /* stdfq, store floating point queue */
4427 #if defined(CONFIG_USER_ONLY)
4428 goto illegal_insn;
4429 #else
4430 if (!supervisor(dc))
4431 goto priv_insn;
4432 if (gen_trap_ifnofpu(dc))
4433 goto jmp_insn;
4434 goto nfq_insn;
4435 #endif
4436 #endif
4437 case 0x27:
4438 gen_op_check_align_T0_7();
4439 gen_op_load_fpr_DT0(DFPREG(rd));
4440 gen_op_ldst(stdf);
4441 break;
4442 default:
4443 goto illegal_insn;
4445 } else if (xop > 0x33 && xop < 0x3f) {
4446 switch (xop) {
4447 #ifdef TARGET_SPARC64
4448 case 0x34: /* V9 stfa */
4449 gen_op_check_align_T0_3();
4450 gen_op_load_fpr_FT0(rd);
4451 gen_stf_asi(insn, 4, rd);
4452 break;
4453 case 0x36: /* V9 stqfa */
4454 #if defined(CONFIG_USER_ONLY)
4455 gen_op_check_align_T0_7();
4456 gen_op_load_fpr_QT0(QFPREG(rd));
4457 gen_stf_asi(insn, 16, QFPREG(rd));
4458 break;
4459 #else
4460 goto nfpu_insn;
4461 #endif
4462 case 0x37: /* V9 stdfa */
4463 gen_op_check_align_T0_3();
4464 gen_op_load_fpr_DT0(DFPREG(rd));
4465 gen_stf_asi(insn, 8, DFPREG(rd));
4466 break;
4467 case 0x3c: /* V9 casa */
4468 gen_op_check_align_T0_3();
4469 gen_cas_asi(insn, rd);
4470 gen_movl_T1_reg(rd);
4471 break;
4472 case 0x3e: /* V9 casxa */
4473 gen_op_check_align_T0_7();
4474 gen_casx_asi(insn, rd);
4475 gen_movl_T1_reg(rd);
4476 break;
4477 #else
4478 case 0x34: /* stc */
4479 case 0x35: /* stcsr */
4480 case 0x36: /* stdcq */
4481 case 0x37: /* stdc */
4482 goto ncp_insn;
4483 #endif
4484 default:
4485 goto illegal_insn;
4488 else
4489 goto illegal_insn;
4491 break;
4493 /* default case for non jump instructions */
4494 if (dc->npc == DYNAMIC_PC) {
4495 dc->pc = DYNAMIC_PC;
4496 gen_op_next_insn();
4497 } else if (dc->npc == JUMP_PC) {
4498 /* we can do a static jump */
4499 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
4500 dc->is_br = 1;
4501 } else {
4502 dc->pc = dc->npc;
4503 dc->npc = dc->npc + 4;
4505 jmp_insn:
4506 return;
4507 illegal_insn:
4508 save_state(dc);
4509 gen_op_exception(TT_ILL_INSN);
4510 dc->is_br = 1;
4511 return;
4512 #if !defined(CONFIG_USER_ONLY)
4513 priv_insn:
4514 save_state(dc);
4515 gen_op_exception(TT_PRIV_INSN);
4516 dc->is_br = 1;
4517 return;
4518 nfpu_insn:
4519 save_state(dc);
4520 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4521 dc->is_br = 1;
4522 return;
4523 #ifndef TARGET_SPARC64
4524 nfq_insn:
4525 save_state(dc);
4526 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4527 dc->is_br = 1;
4528 return;
4529 #endif
4530 #endif
4531 #ifndef TARGET_SPARC64
4532 ncp_insn:
4533 save_state(dc);
4534 gen_op_exception(TT_NCP_INSN);
4535 dc->is_br = 1;
4536 return;
4537 #endif
4540 static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
/* Translate a run of guest SPARC instructions starting at tb->pc into
 * TCG ops for one TranslationBlock.
 *   tb:  translation block being filled in
 *   spc: non-zero selects "search PC" mode -- record each insn's pc/npc
 *        in the gen_opc_* side tables (used to restore CPU state after
 *        a fault) instead of normal code generation bookkeeping
 *   env: CPU state (breakpoints, MMU index, FPU enable, singlestep)
 * Returns 0.
 * NOTE(review): brace-only lines were lost when this chunk was
 * extracted; the code text below is kept byte-identical to the residue. */
4544 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
4545 int spc, CPUSPARCState *env)
4547 target_ulong pc_start, last_pc;
4548 uint16_t *gen_opc_end;
4549 DisasContext dc1, *dc = &dc1;
4550 int j, lj = -1;
/* Initialise the disassembly context from the TB and CPU state. */
4552 memset(dc, 0, sizeof(DisasContext));
4553 dc->tb = tb;
4554 pc_start = tb->pc;
4555 dc->pc = pc_start;
4556 last_pc = dc->pc;
/* The next-PC travels in cs_base (SPARC has delayed branches, so pc
 * alone does not determine npc). */
4557 dc->npc = (target_ulong) tb->cs_base;
4558 dc->mem_idx = cpu_mmu_index(env);
4559 dc->fpu_enabled = cpu_fpu_enabled(env);
4560 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4562 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4564 do {
/* On a breakpoint at this pc, emit a debug exception and stop. */
4565 if (env->nb_breakpoints > 0) {
4566 for(j = 0; j < env->nb_breakpoints; j++) {
4567 if (env->breakpoints[j] == dc->pc) {
4568 if (dc->pc != pc_start)
4569 save_state(dc);
4570 tcg_gen_helper_0_0(helper_debug);
4571 tcg_gen_exit_tb(0);
4572 dc->is_br = 1;
4573 goto exit_gen_loop;
/* Search-PC mode: record pc/npc for this op index so the CPU state
 * can be reconstructed from a host fault address. */
4577 if (spc) {
4578 if (loglevel > 0)
4579 fprintf(logfile, "Search PC...\n");
4580 j = gen_opc_ptr - gen_opc_buf;
4581 if (lj < j) {
4582 lj++;
4583 while (lj < j)
4584 gen_opc_instr_start[lj++] = 0;
4585 gen_opc_pc[lj] = dc->pc;
4586 gen_opc_npc[lj] = dc->npc;
4587 gen_opc_instr_start[lj] = 1;
/* Translate exactly one guest instruction. */
4590 last_pc = dc->pc;
4591 disas_sparc_insn(dc);
4593 if (dc->is_br)
4594 break;
4595 /* if the next PC is different, we abort now */
4596 if (dc->pc != (last_pc + 4))
4597 break;
4598 /* if we reach a page boundary, we stop generation so that the
4599 PC of a TT_TFAULT exception is always in the right page */
4600 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4601 break;
4602 /* if single step mode, we generate only one instruction and
4603 generate an exception */
4604 if (env->singlestep_enabled) {
4605 gen_jmp_im(dc->pc);
4606 tcg_gen_exit_tb(0);
4607 break;
4609 } while ((gen_opc_ptr < gen_opc_end) &&
4610 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
4612 exit_gen_loop:
/* Epilogue: if the last insn did not already end the block, chain to
 * the successor (statically when pc/npc are both known). */
4613 if (!dc->is_br) {
4614 if (dc->pc != DYNAMIC_PC &&
4615 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4616 /* static PC and NPC: we can use direct chaining */
4617 gen_branch(dc, dc->pc, dc->npc);
4618 } else {
4619 if (dc->pc != DYNAMIC_PC)
4620 gen_jmp_im(dc->pc);
4621 save_npc(dc);
4622 tcg_gen_exit_tb(0);
4625 *gen_opc_ptr = INDEX_op_end;
4626 if (spc) {
/* Pad the instr_start table and export the conditional-jump targets. */
4627 j = gen_opc_ptr - gen_opc_buf;
4628 lj++;
4629 while (lj <= j)
4630 gen_opc_instr_start[lj++] = 0;
4631 #if 0
4632 if (loglevel > 0) {
4633 page_dump(logfile);
4635 #endif
4636 gen_opc_jump_pc[0] = dc->jump_pc[0];
4637 gen_opc_jump_pc[1] = dc->jump_pc[1];
4638 } else {
4639 tb->size = last_pc + 4 - pc_start;
4641 #ifdef DEBUG_DISAS
4642 if (loglevel & CPU_LOG_TB_IN_ASM) {
4643 fprintf(logfile, "--------------\n");
4644 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4645 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4646 fprintf(logfile, "\n");
4648 #endif
4649 return 0;
/* Public entry point: translate a TB in normal mode (spc == 0). */
4652 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4654 return gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: translate a TB in search-PC mode (spc == 1),
 * filling the gen_opc_* tables used to recover guest pc/npc after a
 * fault inside generated code. */
4657 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4659 return gen_intermediate_code_internal(tb, 1, env);
/* Reset the CPU to its power-on state: flush the TLB, select register
 * window 0, and set mode/PC per build configuration (user-only vs
 * system, sparc32 vs sparc64). */
4662 void cpu_reset(CPUSPARCState *env)
4664 tlb_flush(env, 1);
4665 env->cwp = 0;
4666 env->wim = 1;
/* Point the window register pointer at the current window's slice. */
4667 env->regwptr = env->regbase + (env->cwp * 16);
4668 #if defined(CONFIG_USER_ONLY)
4669 env->user_mode_only = 1;
4670 #ifdef TARGET_SPARC64
4671 env->cleanwin = NWINDOWS - 2;
4672 env->cansave = NWINDOWS - 2;
4673 env->pstate = PS_RMO | PS_PEF | PS_IE;
4674 env->asi = 0x82; // Primary no-fault
4675 #endif
4676 #else
/* System emulation: start in supervisor mode with traps disabled. */
4677 env->psret = 0;
4678 env->psrs = 1;
4679 env->psrps = 1;
4680 #ifdef TARGET_SPARC64
4681 env->pstate = PS_PRIV;
4682 env->hpstate = HS_PRIV;
4683 env->pc = 0x1fff0000000ULL;
4684 env->tsptr = &env->ts[env->tl];
4685 #else
4686 env->pc = 0;
/* Disable the MMU and restore the boot-mode mapping bit. */
4687 env->mmuregs[0] &= ~(MMU_E | MMU_NF);
4688 env->mmuregs[0] |= env->mmu_bm;
4689 #endif
4690 env->npc = env->pc + 4;
4691 #endif
/* Allocate and initialise a CPUSPARCState for the named CPU model.
 * Looks up the model, copies its IU/FPU/MMU version words and (on
 * sparc32) MMU masks, registers the TCG globals exactly once, and
 * resets the CPU.  Returns NULL if the model is unknown or allocation
 * fails; caller owns the returned state. */
4694 CPUSPARCState *cpu_sparc_init(const char *cpu_model)
4696 CPUSPARCState *env;
4697 const sparc_def_t *def;
4698 static int inited;
4700 def = cpu_sparc_find_by_name(cpu_model);
4701 if (!def)
4702 return NULL;
4704 env = qemu_mallocz(sizeof(CPUSPARCState));
4705 if (!env)
4706 return NULL;
4707 cpu_exec_init(env);
4708 env->cpu_model_str = cpu_model;
4709 env->version = def->iu_version;
4710 env->fsr = def->fpu_version;
4711 #if !defined(TARGET_SPARC64)
/* sparc32: copy the per-model MMU register masks and boot-mode bit. */
4712 env->mmu_bm = def->mmu_bm;
4713 env->mmu_ctpr_mask = def->mmu_ctpr_mask;
4714 env->mmu_cxr_mask = def->mmu_cxr_mask;
4715 env->mmu_sfsr_mask = def->mmu_sfsr_mask;
4716 env->mmu_trcr_mask = def->mmu_trcr_mask;
4717 env->mmuregs[0] |= def->mmu_version;
4718 cpu_sparc_set_id(env, 0);
4719 #endif
4721 /* init various static tables */
4722 if (!inited) {
4723 inited = 1;
/* One-time registration of the TCG global variables (env pointer,
 * window pointer, T0-T2, and condition-code state). */
4725 tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
4726 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4727 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4728 offsetof(CPUState, regwptr),
4729 "regwptr");
4730 //#if TARGET_LONG_BITS > HOST_LONG_BITS
4731 #ifdef TARGET_SPARC64
/* sparc64: T0-T2 live in memory (too wide for host registers). */
4732 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
4733 TCG_AREG0, offsetof(CPUState, t0), "T0");
4734 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
4735 TCG_AREG0, offsetof(CPUState, t1), "T1");
4736 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
4737 TCG_AREG0, offsetof(CPUState, t2), "T2");
4738 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4739 TCG_AREG0, offsetof(CPUState, xcc),
4740 "xcc");
4741 #else
/* sparc32: T0-T2 are pinned to fixed host registers. */
4742 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
4743 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
4744 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
4745 #endif
4746 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4747 TCG_AREG0, offsetof(CPUState, cc_src),
4748 "cc_src");
4749 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4750 TCG_AREG0, offsetof(CPUState, cc_dst),
4751 "cc_dst");
4752 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4753 TCG_AREG0, offsetof(CPUState, psr),
4754 "psr");
4757 cpu_reset(env);
4759 return env;
/* Record the CPU number in MXCC register 7, offset by 8 and masked to
 * 4 bits, shifted into the top byte.  sparc32 only; no-op on sparc64.
 * NOTE(review): presumably this encodes the MBus module id -- confirm
 * against the MXCC documentation. */
4762 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
4764 #if !defined(TARGET_SPARC64)
4765 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
4766 #endif
4769 static const sparc_def_t sparc_defs[] = {
4770 #ifdef TARGET_SPARC64
4772 .name = "Fujitsu Sparc64",
4773 .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
4774 | (MAXTL << 8) | (NWINDOWS - 1)),
4775 .fpu_version = 0x00000000,
4776 .mmu_version = 0,
4779 .name = "Fujitsu Sparc64 III",
4780 .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
4781 | (MAXTL << 8) | (NWINDOWS - 1)),
4782 .fpu_version = 0x00000000,
4783 .mmu_version = 0,
4786 .name = "Fujitsu Sparc64 IV",
4787 .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
4788 | (MAXTL << 8) | (NWINDOWS - 1)),
4789 .fpu_version = 0x00000000,
4790 .mmu_version = 0,
4793 .name = "Fujitsu Sparc64 V",
4794 .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
4795 | (MAXTL << 8) | (NWINDOWS - 1)),
4796 .fpu_version = 0x00000000,
4797 .mmu_version = 0,
4800 .name = "TI UltraSparc I",
4801 .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4802 | (MAXTL << 8) | (NWINDOWS - 1)),
4803 .fpu_version = 0x00000000,
4804 .mmu_version = 0,
4807 .name = "TI UltraSparc II",
4808 .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
4809 | (MAXTL << 8) | (NWINDOWS - 1)),
4810 .fpu_version = 0x00000000,
4811 .mmu_version = 0,
4814 .name = "TI UltraSparc IIi",
4815 .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
4816 | (MAXTL << 8) | (NWINDOWS - 1)),
4817 .fpu_version = 0x00000000,
4818 .mmu_version = 0,
4821 .name = "TI UltraSparc IIe",
4822 .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
4823 | (MAXTL << 8) | (NWINDOWS - 1)),
4824 .fpu_version = 0x00000000,
4825 .mmu_version = 0,
4828 .name = "Sun UltraSparc III",
4829 .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
4830 | (MAXTL << 8) | (NWINDOWS - 1)),
4831 .fpu_version = 0x00000000,
4832 .mmu_version = 0,
4835 .name = "Sun UltraSparc III Cu",
4836 .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
4837 | (MAXTL << 8) | (NWINDOWS - 1)),
4838 .fpu_version = 0x00000000,
4839 .mmu_version = 0,
4842 .name = "Sun UltraSparc IIIi",
4843 .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
4844 | (MAXTL << 8) | (NWINDOWS - 1)),
4845 .fpu_version = 0x00000000,
4846 .mmu_version = 0,
4849 .name = "Sun UltraSparc IV",
4850 .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
4851 | (MAXTL << 8) | (NWINDOWS - 1)),
4852 .fpu_version = 0x00000000,
4853 .mmu_version = 0,
4856 .name = "Sun UltraSparc IV+",
4857 .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
4858 | (MAXTL << 8) | (NWINDOWS - 1)),
4859 .fpu_version = 0x00000000,
4860 .mmu_version = 0,
4863 .name = "Sun UltraSparc IIIi+",
4864 .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
4865 | (MAXTL << 8) | (NWINDOWS - 1)),
4866 .fpu_version = 0x00000000,
4867 .mmu_version = 0,
4870 .name = "NEC UltraSparc I",
4871 .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
4872 | (MAXTL << 8) | (NWINDOWS - 1)),
4873 .fpu_version = 0x00000000,
4874 .mmu_version = 0,
4876 #else
4878 .name = "Fujitsu MB86900",
4879 .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
4880 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4881 .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
4882 .mmu_bm = 0x00004000,
4883 .mmu_ctpr_mask = 0x007ffff0,
4884 .mmu_cxr_mask = 0x0000003f,
4885 .mmu_sfsr_mask = 0xffffffff,
4886 .mmu_trcr_mask = 0xffffffff,
4889 .name = "Fujitsu MB86904",
4890 .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
4891 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4892 .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
4893 .mmu_bm = 0x00004000,
4894 .mmu_ctpr_mask = 0x00ffffc0,
4895 .mmu_cxr_mask = 0x000000ff,
4896 .mmu_sfsr_mask = 0x00016fff,
4897 .mmu_trcr_mask = 0x00ffffff,
4900 .name = "Fujitsu MB86907",
4901 .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
4902 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
4903 .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
4904 .mmu_bm = 0x00004000,
4905 .mmu_ctpr_mask = 0xffffffc0,
4906 .mmu_cxr_mask = 0x000000ff,
4907 .mmu_sfsr_mask = 0x00016fff,
4908 .mmu_trcr_mask = 0xffffffff,
4911 .name = "LSI L64811",
4912 .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
4913 .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
4914 .mmu_version = 0x10 << 24,
4915 .mmu_bm = 0x00004000,
4916 .mmu_ctpr_mask = 0x007ffff0,
4917 .mmu_cxr_mask = 0x0000003f,
4918 .mmu_sfsr_mask = 0xffffffff,
4919 .mmu_trcr_mask = 0xffffffff,
4922 .name = "Cypress CY7C601",
4923 .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
4924 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4925 .mmu_version = 0x10 << 24,
4926 .mmu_bm = 0x00004000,
4927 .mmu_ctpr_mask = 0x007ffff0,
4928 .mmu_cxr_mask = 0x0000003f,
4929 .mmu_sfsr_mask = 0xffffffff,
4930 .mmu_trcr_mask = 0xffffffff,
4933 .name = "Cypress CY7C611",
4934 .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
4935 .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4936 .mmu_version = 0x10 << 24,
4937 .mmu_bm = 0x00004000,
4938 .mmu_ctpr_mask = 0x007ffff0,
4939 .mmu_cxr_mask = 0x0000003f,
4940 .mmu_sfsr_mask = 0xffffffff,
4941 .mmu_trcr_mask = 0xffffffff,
4944 .name = "TI SuperSparc II",
4945 .iu_version = 0x40000000,
4946 .fpu_version = 0 << 17,
4947 .mmu_version = 0x04000000,
4948 .mmu_bm = 0x00002000,
4949 .mmu_ctpr_mask = 0xffffffc0,
4950 .mmu_cxr_mask = 0x0000ffff,
4951 .mmu_sfsr_mask = 0xffffffff,
4952 .mmu_trcr_mask = 0xffffffff,
4955 .name = "TI MicroSparc I",
4956 .iu_version = 0x41000000,
4957 .fpu_version = 4 << 17,
4958 .mmu_version = 0x41000000,
4959 .mmu_bm = 0x00004000,
4960 .mmu_ctpr_mask = 0x007ffff0,
4961 .mmu_cxr_mask = 0x0000003f,
4962 .mmu_sfsr_mask = 0x00016fff,
4963 .mmu_trcr_mask = 0x0000003f,
4966 .name = "TI MicroSparc II",
4967 .iu_version = 0x42000000,
4968 .fpu_version = 4 << 17,
4969 .mmu_version = 0x02000000,
4970 .mmu_bm = 0x00004000,
4971 .mmu_ctpr_mask = 0x00ffffc0,
4972 .mmu_cxr_mask = 0x000000ff,
4973 .mmu_sfsr_mask = 0x00016fff,
4974 .mmu_trcr_mask = 0x00ffffff,
4977 .name = "TI MicroSparc IIep",
4978 .iu_version = 0x42000000,
4979 .fpu_version = 4 << 17,
4980 .mmu_version = 0x04000000,
4981 .mmu_bm = 0x00004000,
4982 .mmu_ctpr_mask = 0x00ffffc0,
4983 .mmu_cxr_mask = 0x000000ff,
4984 .mmu_sfsr_mask = 0x00016bff,
4985 .mmu_trcr_mask = 0x00ffffff,
4988 .name = "TI SuperSparc 51",
4989 .iu_version = 0x43000000,
4990 .fpu_version = 0 << 17,
4991 .mmu_version = 0x04000000,
4992 .mmu_bm = 0x00002000,
4993 .mmu_ctpr_mask = 0xffffffc0,
4994 .mmu_cxr_mask = 0x0000ffff,
4995 .mmu_sfsr_mask = 0xffffffff,
4996 .mmu_trcr_mask = 0xffffffff,
4999 .name = "TI SuperSparc 61",
5000 .iu_version = 0x44000000,
5001 .fpu_version = 0 << 17,
5002 .mmu_version = 0x04000000,
5003 .mmu_bm = 0x00002000,
5004 .mmu_ctpr_mask = 0xffffffc0,
5005 .mmu_cxr_mask = 0x0000ffff,
5006 .mmu_sfsr_mask = 0xffffffff,
5007 .mmu_trcr_mask = 0xffffffff,
5010 .name = "Ross RT625",
5011 .iu_version = 0x1e000000,
5012 .fpu_version = 1 << 17,
5013 .mmu_version = 0x1e000000,
5014 .mmu_bm = 0x00004000,
5015 .mmu_ctpr_mask = 0x007ffff0,
5016 .mmu_cxr_mask = 0x0000003f,
5017 .mmu_sfsr_mask = 0xffffffff,
5018 .mmu_trcr_mask = 0xffffffff,
5021 .name = "Ross RT620",
5022 .iu_version = 0x1f000000,
5023 .fpu_version = 1 << 17,
5024 .mmu_version = 0x1f000000,
5025 .mmu_bm = 0x00004000,
5026 .mmu_ctpr_mask = 0x007ffff0,
5027 .mmu_cxr_mask = 0x0000003f,
5028 .mmu_sfsr_mask = 0xffffffff,
5029 .mmu_trcr_mask = 0xffffffff,
5032 .name = "BIT B5010",
5033 .iu_version = 0x20000000,
5034 .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
5035 .mmu_version = 0x20000000,
5036 .mmu_bm = 0x00004000,
5037 .mmu_ctpr_mask = 0x007ffff0,
5038 .mmu_cxr_mask = 0x0000003f,
5039 .mmu_sfsr_mask = 0xffffffff,
5040 .mmu_trcr_mask = 0xffffffff,
5043 .name = "Matsushita MN10501",
5044 .iu_version = 0x50000000,
5045 .fpu_version = 0 << 17,
5046 .mmu_version = 0x50000000,
5047 .mmu_bm = 0x00004000,
5048 .mmu_ctpr_mask = 0x007ffff0,
5049 .mmu_cxr_mask = 0x0000003f,
5050 .mmu_sfsr_mask = 0xffffffff,
5051 .mmu_trcr_mask = 0xffffffff,
5054 .name = "Weitek W8601",
5055 .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
5056 .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
5057 .mmu_version = 0x10 << 24,
5058 .mmu_bm = 0x00004000,
5059 .mmu_ctpr_mask = 0x007ffff0,
5060 .mmu_cxr_mask = 0x0000003f,
5061 .mmu_sfsr_mask = 0xffffffff,
5062 .mmu_trcr_mask = 0xffffffff,
5065 .name = "LEON2",
5066 .iu_version = 0xf2000000,
5067 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
5068 .mmu_version = 0xf2000000,
5069 .mmu_bm = 0x00004000,
5070 .mmu_ctpr_mask = 0x007ffff0,
5071 .mmu_cxr_mask = 0x0000003f,
5072 .mmu_sfsr_mask = 0xffffffff,
5073 .mmu_trcr_mask = 0xffffffff,
5076 .name = "LEON3",
5077 .iu_version = 0xf3000000,
5078 .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
5079 .mmu_version = 0xf3000000,
5080 .mmu_bm = 0x00004000,
5081 .mmu_ctpr_mask = 0x007ffff0,
5082 .mmu_cxr_mask = 0x0000003f,
5083 .mmu_sfsr_mask = 0xffffffff,
5084 .mmu_trcr_mask = 0xffffffff,
5086 #endif
5089 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
5091 unsigned int i;
5093 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5094 if (strcasecmp(name, sparc_defs[i].name) == 0) {
5095 return &sparc_defs[i];
5098 return NULL;
5101 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
5103 unsigned int i;
5105 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5106 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
5107 sparc_defs[i].name,
5108 sparc_defs[i].iu_version,
5109 sparc_defs[i].fpu_version,
5110 sparc_defs[i].mmu_version);
/* Render PSR condition-code bit A as character B when set, '-' when clear.
   NOTE: expands the identifier `env' from the caller's scope (only used
   inside cpu_dump_state below).  Parameters are parenthesized to keep the
   expansion safe for compound argument expressions.  */
#define GET_FLAG(a,b) ((env->psr & (a)) ? (b) : '-')
5116 void cpu_dump_state(CPUState *env, FILE *f,
5117 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
5118 int flags)
5120 int i, x;
5122 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
5123 cpu_fprintf(f, "General Registers:\n");
5124 for (i = 0; i < 4; i++)
5125 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
5126 cpu_fprintf(f, "\n");
5127 for (; i < 8; i++)
5128 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
5129 cpu_fprintf(f, "\nCurrent Register Window:\n");
5130 for (x = 0; x < 3; x++) {
5131 for (i = 0; i < 4; i++)
5132 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
5133 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
5134 env->regwptr[i + x * 8]);
5135 cpu_fprintf(f, "\n");
5136 for (; i < 8; i++)
5137 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
5138 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
5139 env->regwptr[i + x * 8]);
5140 cpu_fprintf(f, "\n");
5142 cpu_fprintf(f, "\nFloating Point Registers:\n");
5143 for (i = 0; i < 32; i++) {
5144 if ((i & 3) == 0)
5145 cpu_fprintf(f, "%%f%02d:", i);
5146 cpu_fprintf(f, " %016lf", env->fpr[i]);
5147 if ((i & 3) == 3)
5148 cpu_fprintf(f, "\n");
5150 #ifdef TARGET_SPARC64
5151 cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
5152 env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
5153 cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
5154 env->cansave, env->canrestore, env->otherwin, env->wstate,
5155 env->cleanwin, NWINDOWS - 1 - env->cwp);
5156 #else
5157 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
5158 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
5159 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
5160 env->psrs?'S':'-', env->psrps?'P':'-',
5161 env->psret?'E':'-', env->wim);
5162 #endif
5163 cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
5166 #if defined(CONFIG_USER_ONLY)
5167 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
5169 return addr;
5172 #else
5173 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
5174 int *access_index, target_ulong address, int rw,
5175 int mmu_idx);
5177 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
5179 target_phys_addr_t phys_addr;
5180 int prot, access_index;
5182 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
5183 MMU_KERNEL_IDX) != 0)
5184 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
5185 0, MMU_KERNEL_IDX) != 0)
5186 return -1;
5187 if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
5188 return -1;
5189 return phys_addr;
5191 #endif
5193 void helper_flush(target_ulong addr)
5195 addr &= ~7;
5196 tb_invalidate_page_range(addr, addr + 8);