qemu/pci: helper routines for pci access
[qemu.git] / target-sparc / translate.c
blob6de40791b517827d965d69029b62894d22a11cef
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_cc_op;
47 static TCGv_i32 cpu_psr;
48 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
49 static TCGv cpu_y;
50 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_tbr;
52 #endif
53 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
54 #ifdef TARGET_SPARC64
55 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
56 static TCGv cpu_gsr;
57 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
58 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
59 static TCGv_i32 cpu_softint;
60 #else
61 static TCGv cpu_wim;
62 #endif
63 /* local register indexes (only used inside old micro ops) */
64 static TCGv cpu_tmp0;
65 static TCGv_i32 cpu_tmp32;
66 static TCGv_i64 cpu_tmp64;
67 /* Floating point registers */
68 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
70 #include "gen-icount.h"
72 typedef struct DisasContext {
73 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
74 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
75 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
76 int is_br;
77 int mem_idx;
78 int fpu_enabled;
79 int address_mask_32bit;
80 uint32_t cc_op; /* current CC operation */
81 struct TranslationBlock *tb;
82 sparc_def_t *def;
83 } DisasContext;
85 // This function uses non-native bit order
86 #define GET_FIELD(X, FROM, TO) \
87 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
89 // This function uses the order in the manuals, i.e. bit 0 is 2^0
90 #define GET_FIELD_SP(X, FROM, TO) \
91 GET_FIELD(X, 31 - (TO), 31 - (FROM))
93 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
94 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
96 #ifdef TARGET_SPARC64
97 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
98 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
99 #else
100 #define DFPREG(r) (r & 0x1e)
101 #define QFPREG(r) (r & 0x1c)
102 #endif
104 #define UA2005_HTRAP_MASK 0xff
105 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X.
 * The original shifted X left into the sign bit, which is undefined
 * behavior in C for negative/overflowing values; do the left shift on an
 * unsigned value instead.  The arithmetic right shift of a negative value
 * is implementation-defined but universally sign-propagating on the
 * compilers QEMU supports.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
113 #define IS_IMM (insn & (1<<13))
115 /* floating point registers moves */
116 static void gen_op_load_fpr_DT0(unsigned int src)
118 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
119 offsetof(CPU_DoubleU, l.upper));
120 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
121 offsetof(CPU_DoubleU, l.lower));
124 static void gen_op_load_fpr_DT1(unsigned int src)
126 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
127 offsetof(CPU_DoubleU, l.upper));
128 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
129 offsetof(CPU_DoubleU, l.lower));
132 static void gen_op_store_DT0_fpr(unsigned int dst)
134 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
135 offsetof(CPU_DoubleU, l.upper));
136 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
137 offsetof(CPU_DoubleU, l.lower));
140 static void gen_op_load_fpr_QT0(unsigned int src)
142 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
143 offsetof(CPU_QuadU, l.upmost));
144 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
145 offsetof(CPU_QuadU, l.upper));
146 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.lower));
148 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
149 offsetof(CPU_QuadU, l.lowest));
152 static void gen_op_load_fpr_QT1(unsigned int src)
154 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
155 offsetof(CPU_QuadU, l.upmost));
156 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
157 offsetof(CPU_QuadU, l.upper));
158 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.lower));
160 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
161 offsetof(CPU_QuadU, l.lowest));
164 static void gen_op_store_QT0_fpr(unsigned int dst)
166 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
167 offsetof(CPU_QuadU, l.upmost));
168 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
169 offsetof(CPU_QuadU, l.upper));
170 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.lower));
172 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
173 offsetof(CPU_QuadU, l.lowest));
176 /* moves */
177 #ifdef CONFIG_USER_ONLY
178 #define supervisor(dc) 0
179 #ifdef TARGET_SPARC64
180 #define hypervisor(dc) 0
181 #endif
182 #else
183 #define supervisor(dc) (dc->mem_idx >= 1)
184 #ifdef TARGET_SPARC64
185 #define hypervisor(dc) (dc->mem_idx == 2)
186 #else
187 #endif
188 #endif
190 #ifdef TARGET_SPARC64
191 #ifndef TARGET_ABI32
192 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
193 #else
194 #define AM_CHECK(dc) (1)
195 #endif
196 #endif
198 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
200 #ifdef TARGET_SPARC64
201 if (AM_CHECK(dc))
202 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
203 #endif
206 static inline void gen_movl_reg_TN(int reg, TCGv tn)
208 if (reg == 0)
209 tcg_gen_movi_tl(tn, 0);
210 else if (reg < 8)
211 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
212 else {
213 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217 static inline void gen_movl_TN_reg(int reg, TCGv tn)
219 if (reg == 0)
220 return;
221 else if (reg < 8)
222 tcg_gen_mov_tl(cpu_gregs[reg], tn);
223 else {
224 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
228 static inline void gen_goto_tb(DisasContext *s, int tb_num,
229 target_ulong pc, target_ulong npc)
231 TranslationBlock *tb;
233 tb = s->tb;
234 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
235 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
236 /* jump to same page: we can use a direct jump */
237 tcg_gen_goto_tb(tb_num);
238 tcg_gen_movi_tl(cpu_pc, pc);
239 tcg_gen_movi_tl(cpu_npc, npc);
240 tcg_gen_exit_tb((long)tb + tb_num);
241 } else {
242 /* jump to another page: currently not optimized */
243 tcg_gen_movi_tl(cpu_pc, pc);
244 tcg_gen_movi_tl(cpu_npc, npc);
245 tcg_gen_exit_tb(0);
249 // XXX suboptimal
250 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
252 tcg_gen_extu_i32_tl(reg, src);
253 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
254 tcg_gen_andi_tl(reg, reg, 0x1);
257 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
259 tcg_gen_extu_i32_tl(reg, src);
260 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
261 tcg_gen_andi_tl(reg, reg, 0x1);
264 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
266 tcg_gen_extu_i32_tl(reg, src);
267 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
268 tcg_gen_andi_tl(reg, reg, 0x1);
271 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
273 tcg_gen_extu_i32_tl(reg, src);
274 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
275 tcg_gen_andi_tl(reg, reg, 0x1);
278 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
280 TCGv r_temp;
281 TCGv_i32 r_const;
282 int l1;
284 l1 = gen_new_label();
286 r_temp = tcg_temp_new();
287 tcg_gen_xor_tl(r_temp, src1, src2);
288 tcg_gen_not_tl(r_temp, r_temp);
289 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
290 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
291 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
292 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
293 r_const = tcg_const_i32(TT_TOVF);
294 gen_helper_raise_exception(r_const);
295 tcg_temp_free_i32(r_const);
296 gen_set_label(l1);
297 tcg_temp_free(r_temp);
300 static inline void gen_tag_tv(TCGv src1, TCGv src2)
302 int l1;
303 TCGv_i32 r_const;
305 l1 = gen_new_label();
306 tcg_gen_or_tl(cpu_tmp0, src1, src2);
307 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
308 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
309 r_const = tcg_const_i32(TT_TOVF);
310 gen_helper_raise_exception(r_const);
311 tcg_temp_free_i32(r_const);
312 gen_set_label(l1);
315 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
317 tcg_gen_mov_tl(cpu_cc_src, src1);
318 tcg_gen_movi_tl(cpu_cc_src2, src2);
319 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
320 tcg_gen_mov_tl(dst, cpu_cc_dst);
323 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
325 tcg_gen_mov_tl(cpu_cc_src, src1);
326 tcg_gen_mov_tl(cpu_cc_src2, src2);
327 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
328 tcg_gen_mov_tl(dst, cpu_cc_dst);
331 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
333 tcg_gen_mov_tl(cpu_cc_src, src1);
334 tcg_gen_movi_tl(cpu_cc_src2, src2);
335 gen_mov_reg_C(cpu_tmp0, cpu_psr);
336 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
337 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
338 tcg_gen_mov_tl(dst, cpu_cc_dst);
341 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
343 tcg_gen_mov_tl(cpu_cc_src, src1);
344 tcg_gen_mov_tl(cpu_cc_src2, src2);
345 gen_mov_reg_C(cpu_tmp0, cpu_psr);
346 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
347 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
348 tcg_gen_mov_tl(dst, cpu_cc_dst);
351 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
353 tcg_gen_mov_tl(cpu_cc_src, src1);
354 tcg_gen_mov_tl(cpu_cc_src2, src2);
355 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
356 tcg_gen_mov_tl(dst, cpu_cc_dst);
359 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
361 tcg_gen_mov_tl(cpu_cc_src, src1);
362 tcg_gen_mov_tl(cpu_cc_src2, src2);
363 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
364 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
365 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
366 tcg_gen_mov_tl(dst, cpu_cc_dst);
369 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
371 TCGv r_temp;
372 TCGv_i32 r_const;
373 int l1;
375 l1 = gen_new_label();
377 r_temp = tcg_temp_new();
378 tcg_gen_xor_tl(r_temp, src1, src2);
379 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
380 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
381 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
382 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
383 r_const = tcg_const_i32(TT_TOVF);
384 gen_helper_raise_exception(r_const);
385 tcg_temp_free_i32(r_const);
386 gen_set_label(l1);
387 tcg_temp_free(r_temp);
390 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
392 tcg_gen_mov_tl(cpu_cc_src, src1);
393 tcg_gen_movi_tl(cpu_cc_src2, src2);
394 if (src2 == 0) {
395 tcg_gen_mov_tl(cpu_cc_dst, src1);
396 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
397 dc->cc_op = CC_OP_LOGIC;
398 } else {
399 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
400 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
401 dc->cc_op = CC_OP_SUB;
403 tcg_gen_mov_tl(dst, cpu_cc_dst);
406 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
408 tcg_gen_mov_tl(cpu_cc_src, src1);
409 tcg_gen_mov_tl(cpu_cc_src2, src2);
410 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
411 tcg_gen_mov_tl(dst, cpu_cc_dst);
414 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
416 tcg_gen_mov_tl(cpu_cc_src, src1);
417 tcg_gen_movi_tl(cpu_cc_src2, src2);
418 gen_mov_reg_C(cpu_tmp0, cpu_psr);
419 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
420 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
421 tcg_gen_mov_tl(dst, cpu_cc_dst);
424 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
426 tcg_gen_mov_tl(cpu_cc_src, src1);
427 tcg_gen_mov_tl(cpu_cc_src2, src2);
428 gen_mov_reg_C(cpu_tmp0, cpu_psr);
429 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
430 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
431 tcg_gen_mov_tl(dst, cpu_cc_dst);
434 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
436 tcg_gen_mov_tl(cpu_cc_src, src1);
437 tcg_gen_mov_tl(cpu_cc_src2, src2);
438 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
439 tcg_gen_mov_tl(dst, cpu_cc_dst);
442 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
444 tcg_gen_mov_tl(cpu_cc_src, src1);
445 tcg_gen_mov_tl(cpu_cc_src2, src2);
446 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
447 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
449 tcg_gen_mov_tl(dst, cpu_cc_dst);
452 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
454 TCGv r_temp;
455 int l1;
457 l1 = gen_new_label();
458 r_temp = tcg_temp_new();
460 /* old op:
461 if (!(env->y & 1))
462 T1 = 0;
464 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
465 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
466 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
467 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
468 tcg_gen_movi_tl(cpu_cc_src2, 0);
469 gen_set_label(l1);
471 // b2 = T0 & 1;
472 // env->y = (b2 << 31) | (env->y >> 1);
473 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
474 tcg_gen_shli_tl(r_temp, r_temp, 31);
475 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
476 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
477 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
478 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
480 // b1 = N ^ V;
481 gen_mov_reg_N(cpu_tmp0, cpu_psr);
482 gen_mov_reg_V(r_temp, cpu_psr);
483 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
484 tcg_temp_free(r_temp);
486 // T0 = (b1 << 31) | (T0 >> 1);
487 // src1 = T0;
488 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
489 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
490 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
492 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
494 tcg_gen_mov_tl(dst, cpu_cc_dst);
497 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
499 TCGv_i64 r_temp, r_temp2;
501 r_temp = tcg_temp_new_i64();
502 r_temp2 = tcg_temp_new_i64();
504 tcg_gen_extu_tl_i64(r_temp, src2);
505 tcg_gen_extu_tl_i64(r_temp2, src1);
506 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
508 tcg_gen_shri_i64(r_temp, r_temp2, 32);
509 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
510 tcg_temp_free_i64(r_temp);
511 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
512 #ifdef TARGET_SPARC64
513 tcg_gen_mov_i64(dst, r_temp2);
514 #else
515 tcg_gen_trunc_i64_tl(dst, r_temp2);
516 #endif
517 tcg_temp_free_i64(r_temp2);
520 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
522 TCGv_i64 r_temp, r_temp2;
524 r_temp = tcg_temp_new_i64();
525 r_temp2 = tcg_temp_new_i64();
527 tcg_gen_ext_tl_i64(r_temp, src2);
528 tcg_gen_ext_tl_i64(r_temp2, src1);
529 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
531 tcg_gen_shri_i64(r_temp, r_temp2, 32);
532 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
533 tcg_temp_free_i64(r_temp);
534 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
535 #ifdef TARGET_SPARC64
536 tcg_gen_mov_i64(dst, r_temp2);
537 #else
538 tcg_gen_trunc_i64_tl(dst, r_temp2);
539 #endif
540 tcg_temp_free_i64(r_temp2);
543 #ifdef TARGET_SPARC64
544 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
546 TCGv_i32 r_const;
547 int l1;
549 l1 = gen_new_label();
550 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
551 r_const = tcg_const_i32(TT_DIV_ZERO);
552 gen_helper_raise_exception(r_const);
553 tcg_temp_free_i32(r_const);
554 gen_set_label(l1);
557 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
559 int l1, l2;
561 l1 = gen_new_label();
562 l2 = gen_new_label();
563 tcg_gen_mov_tl(cpu_cc_src, src1);
564 tcg_gen_mov_tl(cpu_cc_src2, src2);
565 gen_trap_ifdivzero_tl(cpu_cc_src2);
566 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
567 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
568 tcg_gen_movi_i64(dst, INT64_MIN);
569 tcg_gen_br(l2);
570 gen_set_label(l1);
571 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
572 gen_set_label(l2);
574 #endif
576 // 1
577 static inline void gen_op_eval_ba(TCGv dst)
579 tcg_gen_movi_tl(dst, 1);
582 // Z
583 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
585 gen_mov_reg_Z(dst, src);
588 // Z | (N ^ V)
589 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
591 gen_mov_reg_N(cpu_tmp0, src);
592 gen_mov_reg_V(dst, src);
593 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
594 gen_mov_reg_Z(cpu_tmp0, src);
595 tcg_gen_or_tl(dst, dst, cpu_tmp0);
598 // N ^ V
599 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
601 gen_mov_reg_V(cpu_tmp0, src);
602 gen_mov_reg_N(dst, src);
603 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
606 // C | Z
607 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
609 gen_mov_reg_Z(cpu_tmp0, src);
610 gen_mov_reg_C(dst, src);
611 tcg_gen_or_tl(dst, dst, cpu_tmp0);
614 // C
615 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
617 gen_mov_reg_C(dst, src);
620 // V
621 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
623 gen_mov_reg_V(dst, src);
626 // 0
627 static inline void gen_op_eval_bn(TCGv dst)
629 tcg_gen_movi_tl(dst, 0);
632 // N
633 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
635 gen_mov_reg_N(dst, src);
638 // !Z
639 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
641 gen_mov_reg_Z(dst, src);
642 tcg_gen_xori_tl(dst, dst, 0x1);
645 // !(Z | (N ^ V))
646 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
648 gen_mov_reg_N(cpu_tmp0, src);
649 gen_mov_reg_V(dst, src);
650 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
651 gen_mov_reg_Z(cpu_tmp0, src);
652 tcg_gen_or_tl(dst, dst, cpu_tmp0);
653 tcg_gen_xori_tl(dst, dst, 0x1);
656 // !(N ^ V)
657 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
659 gen_mov_reg_V(cpu_tmp0, src);
660 gen_mov_reg_N(dst, src);
661 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
662 tcg_gen_xori_tl(dst, dst, 0x1);
665 // !(C | Z)
666 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
668 gen_mov_reg_Z(cpu_tmp0, src);
669 gen_mov_reg_C(dst, src);
670 tcg_gen_or_tl(dst, dst, cpu_tmp0);
671 tcg_gen_xori_tl(dst, dst, 0x1);
674 // !C
675 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
677 gen_mov_reg_C(dst, src);
678 tcg_gen_xori_tl(dst, dst, 0x1);
681 // !N
682 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
684 gen_mov_reg_N(dst, src);
685 tcg_gen_xori_tl(dst, dst, 0x1);
688 // !V
689 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
691 gen_mov_reg_V(dst, src);
692 tcg_gen_xori_tl(dst, dst, 0x1);
696 FPSR bit field FCC1 | FCC0:
700 3 unordered
702 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
703 unsigned int fcc_offset)
705 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
706 tcg_gen_andi_tl(reg, reg, 0x1);
709 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
710 unsigned int fcc_offset)
712 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
713 tcg_gen_andi_tl(reg, reg, 0x1);
716 // !0: FCC0 | FCC1
717 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
718 unsigned int fcc_offset)
720 gen_mov_reg_FCC0(dst, src, fcc_offset);
721 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
722 tcg_gen_or_tl(dst, dst, cpu_tmp0);
725 // 1 or 2: FCC0 ^ FCC1
726 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
727 unsigned int fcc_offset)
729 gen_mov_reg_FCC0(dst, src, fcc_offset);
730 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
731 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
734 // 1 or 3: FCC0
735 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
736 unsigned int fcc_offset)
738 gen_mov_reg_FCC0(dst, src, fcc_offset);
741 // 1: FCC0 & !FCC1
742 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
743 unsigned int fcc_offset)
745 gen_mov_reg_FCC0(dst, src, fcc_offset);
746 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
747 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
748 tcg_gen_and_tl(dst, dst, cpu_tmp0);
751 // 2 or 3: FCC1
752 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
753 unsigned int fcc_offset)
755 gen_mov_reg_FCC1(dst, src, fcc_offset);
758 // 2: !FCC0 & FCC1
759 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
760 unsigned int fcc_offset)
762 gen_mov_reg_FCC0(dst, src, fcc_offset);
763 tcg_gen_xori_tl(dst, dst, 0x1);
764 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
765 tcg_gen_and_tl(dst, dst, cpu_tmp0);
768 // 3: FCC0 & FCC1
769 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
770 unsigned int fcc_offset)
772 gen_mov_reg_FCC0(dst, src, fcc_offset);
773 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
774 tcg_gen_and_tl(dst, dst, cpu_tmp0);
777 // 0: !(FCC0 | FCC1)
778 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
779 unsigned int fcc_offset)
781 gen_mov_reg_FCC0(dst, src, fcc_offset);
782 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
783 tcg_gen_or_tl(dst, dst, cpu_tmp0);
784 tcg_gen_xori_tl(dst, dst, 0x1);
787 // 0 or 3: !(FCC0 ^ FCC1)
788 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
789 unsigned int fcc_offset)
791 gen_mov_reg_FCC0(dst, src, fcc_offset);
792 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
793 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
794 tcg_gen_xori_tl(dst, dst, 0x1);
797 // 0 or 2: !FCC0
798 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
799 unsigned int fcc_offset)
801 gen_mov_reg_FCC0(dst, src, fcc_offset);
802 tcg_gen_xori_tl(dst, dst, 0x1);
805 // !1: !(FCC0 & !FCC1)
806 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
807 unsigned int fcc_offset)
809 gen_mov_reg_FCC0(dst, src, fcc_offset);
810 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
811 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
812 tcg_gen_and_tl(dst, dst, cpu_tmp0);
813 tcg_gen_xori_tl(dst, dst, 0x1);
816 // 0 or 1: !FCC1
817 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
818 unsigned int fcc_offset)
820 gen_mov_reg_FCC1(dst, src, fcc_offset);
821 tcg_gen_xori_tl(dst, dst, 0x1);
824 // !2: !(!FCC0 & FCC1)
825 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
826 unsigned int fcc_offset)
828 gen_mov_reg_FCC0(dst, src, fcc_offset);
829 tcg_gen_xori_tl(dst, dst, 0x1);
830 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
831 tcg_gen_and_tl(dst, dst, cpu_tmp0);
832 tcg_gen_xori_tl(dst, dst, 0x1);
835 // !3: !(FCC0 & FCC1)
836 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
837 unsigned int fcc_offset)
839 gen_mov_reg_FCC0(dst, src, fcc_offset);
840 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
841 tcg_gen_and_tl(dst, dst, cpu_tmp0);
842 tcg_gen_xori_tl(dst, dst, 0x1);
845 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
846 target_ulong pc2, TCGv r_cond)
848 int l1;
850 l1 = gen_new_label();
852 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
854 gen_goto_tb(dc, 0, pc1, pc1 + 4);
856 gen_set_label(l1);
857 gen_goto_tb(dc, 1, pc2, pc2 + 4);
860 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
861 target_ulong pc2, TCGv r_cond)
863 int l1;
865 l1 = gen_new_label();
867 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
869 gen_goto_tb(dc, 0, pc2, pc1);
871 gen_set_label(l1);
872 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
875 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
876 TCGv r_cond)
878 int l1, l2;
880 l1 = gen_new_label();
881 l2 = gen_new_label();
883 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
885 tcg_gen_movi_tl(cpu_npc, npc1);
886 tcg_gen_br(l2);
888 gen_set_label(l1);
889 tcg_gen_movi_tl(cpu_npc, npc2);
890 gen_set_label(l2);
893 /* call this function before using the condition register as it may
894 have been set for a jump */
895 static inline void flush_cond(DisasContext *dc, TCGv cond)
897 if (dc->npc == JUMP_PC) {
898 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
899 dc->npc = DYNAMIC_PC;
903 static inline void save_npc(DisasContext *dc, TCGv cond)
905 if (dc->npc == JUMP_PC) {
906 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
907 dc->npc = DYNAMIC_PC;
908 } else if (dc->npc != DYNAMIC_PC) {
909 tcg_gen_movi_tl(cpu_npc, dc->npc);
913 static inline void save_state(DisasContext *dc, TCGv cond)
915 tcg_gen_movi_tl(cpu_pc, dc->pc);
916 save_npc(dc, cond);
919 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
921 if (dc->npc == JUMP_PC) {
922 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
923 tcg_gen_mov_tl(cpu_pc, cpu_npc);
924 dc->pc = DYNAMIC_PC;
925 } else if (dc->npc == DYNAMIC_PC) {
926 tcg_gen_mov_tl(cpu_pc, cpu_npc);
927 dc->pc = DYNAMIC_PC;
928 } else {
929 dc->pc = dc->npc;
933 static inline void gen_op_next_insn(void)
935 tcg_gen_mov_tl(cpu_pc, cpu_npc);
936 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
939 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
940 DisasContext *dc)
942 TCGv_i32 r_src;
944 #ifdef TARGET_SPARC64
945 if (cc)
946 r_src = cpu_xcc;
947 else
948 r_src = cpu_psr;
949 #else
950 r_src = cpu_psr;
951 #endif
952 switch (dc->cc_op) {
953 case CC_OP_FLAGS:
954 break;
955 default:
956 gen_helper_compute_psr();
957 dc->cc_op = CC_OP_FLAGS;
958 break;
960 switch (cond) {
961 case 0x0:
962 gen_op_eval_bn(r_dst);
963 break;
964 case 0x1:
965 gen_op_eval_be(r_dst, r_src);
966 break;
967 case 0x2:
968 gen_op_eval_ble(r_dst, r_src);
969 break;
970 case 0x3:
971 gen_op_eval_bl(r_dst, r_src);
972 break;
973 case 0x4:
974 gen_op_eval_bleu(r_dst, r_src);
975 break;
976 case 0x5:
977 gen_op_eval_bcs(r_dst, r_src);
978 break;
979 case 0x6:
980 gen_op_eval_bneg(r_dst, r_src);
981 break;
982 case 0x7:
983 gen_op_eval_bvs(r_dst, r_src);
984 break;
985 case 0x8:
986 gen_op_eval_ba(r_dst);
987 break;
988 case 0x9:
989 gen_op_eval_bne(r_dst, r_src);
990 break;
991 case 0xa:
992 gen_op_eval_bg(r_dst, r_src);
993 break;
994 case 0xb:
995 gen_op_eval_bge(r_dst, r_src);
996 break;
997 case 0xc:
998 gen_op_eval_bgu(r_dst, r_src);
999 break;
1000 case 0xd:
1001 gen_op_eval_bcc(r_dst, r_src);
1002 break;
1003 case 0xe:
1004 gen_op_eval_bpos(r_dst, r_src);
1005 break;
1006 case 0xf:
1007 gen_op_eval_bvc(r_dst, r_src);
1008 break;
1012 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1014 unsigned int offset;
1016 switch (cc) {
1017 default:
1018 case 0x0:
1019 offset = 0;
1020 break;
1021 case 0x1:
1022 offset = 32 - 10;
1023 break;
1024 case 0x2:
1025 offset = 34 - 10;
1026 break;
1027 case 0x3:
1028 offset = 36 - 10;
1029 break;
1032 switch (cond) {
1033 case 0x0:
1034 gen_op_eval_bn(r_dst);
1035 break;
1036 case 0x1:
1037 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1038 break;
1039 case 0x2:
1040 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1041 break;
1042 case 0x3:
1043 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1044 break;
1045 case 0x4:
1046 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1047 break;
1048 case 0x5:
1049 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1050 break;
1051 case 0x6:
1052 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1053 break;
1054 case 0x7:
1055 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1056 break;
1057 case 0x8:
1058 gen_op_eval_ba(r_dst);
1059 break;
1060 case 0x9:
1061 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1062 break;
1063 case 0xa:
1064 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1065 break;
1066 case 0xb:
1067 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1068 break;
1069 case 0xc:
1070 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1071 break;
1072 case 0xd:
1073 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1074 break;
1075 case 0xe:
1076 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1077 break;
1078 case 0xf:
1079 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1080 break;
1084 #ifdef TARGET_SPARC64
1085 // Inverted logic
1086 static const int gen_tcg_cond_reg[8] = {
1088 TCG_COND_NE,
1089 TCG_COND_GT,
1090 TCG_COND_GE,
1092 TCG_COND_EQ,
1093 TCG_COND_LE,
1094 TCG_COND_LT,
1097 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1099 int l1;
1101 l1 = gen_new_label();
1102 tcg_gen_movi_tl(r_dst, 0);
1103 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1104 tcg_gen_movi_tl(r_dst, 1);
1105 gen_set_label(l1);
1107 #endif
1109 /* XXX: potentially incorrect if dynamic npc */
1110 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1111 TCGv r_cond)
1113 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1114 target_ulong target = dc->pc + offset;
1116 if (cond == 0x0) {
1117 /* unconditional not taken */
1118 if (a) {
1119 dc->pc = dc->npc + 4;
1120 dc->npc = dc->pc + 4;
1121 } else {
1122 dc->pc = dc->npc;
1123 dc->npc = dc->pc + 4;
1125 } else if (cond == 0x8) {
1126 /* unconditional taken */
1127 if (a) {
1128 dc->pc = target;
1129 dc->npc = dc->pc + 4;
1130 } else {
1131 dc->pc = dc->npc;
1132 dc->npc = target;
1134 } else {
1135 flush_cond(dc, r_cond);
1136 gen_cond(r_cond, cc, cond, dc);
1137 if (a) {
1138 gen_branch_a(dc, target, dc->npc, r_cond);
1139 dc->is_br = 1;
1140 } else {
1141 dc->pc = dc->npc;
1142 dc->jump_pc[0] = target;
1143 dc->jump_pc[1] = dc->npc + 4;
1144 dc->npc = JUMP_PC;
1149 /* XXX: potentially incorrect if dynamic npc */
1150 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1151 TCGv r_cond)
1153 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1154 target_ulong target = dc->pc + offset;
1156 if (cond == 0x0) {
1157 /* unconditional not taken */
1158 if (a) {
1159 dc->pc = dc->npc + 4;
1160 dc->npc = dc->pc + 4;
1161 } else {
1162 dc->pc = dc->npc;
1163 dc->npc = dc->pc + 4;
1165 } else if (cond == 0x8) {
1166 /* unconditional taken */
1167 if (a) {
1168 dc->pc = target;
1169 dc->npc = dc->pc + 4;
1170 } else {
1171 dc->pc = dc->npc;
1172 dc->npc = target;
1174 } else {
1175 flush_cond(dc, r_cond);
1176 gen_fcond(r_cond, cc, cond);
1177 if (a) {
1178 gen_branch_a(dc, target, dc->npc, r_cond);
1179 dc->is_br = 1;
1180 } else {
1181 dc->pc = dc->npc;
1182 dc->jump_pc[0] = target;
1183 dc->jump_pc[1] = dc->npc + 4;
1184 dc->npc = JUMP_PC;
1189 #ifdef TARGET_SPARC64
1190 /* XXX: potentially incorrect if dynamic npc */
1191 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1192 TCGv r_cond, TCGv r_reg)
1194 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1195 target_ulong target = dc->pc + offset;
1197 flush_cond(dc, r_cond);
1198 gen_cond_reg(r_cond, cond, r_reg);
1199 if (a) {
1200 gen_branch_a(dc, target, dc->npc, r_cond);
1201 dc->is_br = 1;
1202 } else {
1203 dc->pc = dc->npc;
1204 dc->jump_pc[0] = target;
1205 dc->jump_pc[1] = dc->npc + 4;
1206 dc->npc = JUMP_PC;
1210 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1212 switch (fccno) {
1213 case 0:
1214 gen_helper_fcmps(r_rs1, r_rs2);
1215 break;
1216 case 1:
1217 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1218 break;
1219 case 2:
1220 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1221 break;
1222 case 3:
1223 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1224 break;
/* Double-precision FP compare for %fcc[fccno].  Operands are implicit;
   callers load them beforehand (presumably into the DT0/DT1 temporaries,
   as with the other double-precision helpers in this file).  */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}
/* Quad-precision FP compare for %fcc[fccno]; operands implicit (loaded
   by the caller, as with the QT0/QT1-based quad helpers).  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
1264 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1266 switch (fccno) {
1267 case 0:
1268 gen_helper_fcmpes(r_rs1, r_rs2);
1269 break;
1270 case 1:
1271 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1272 break;
1273 case 2:
1274 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1275 break;
1276 case 3:
1277 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1278 break;
/* Double-precision compare-with-exception for %fcc[fccno]; operands
   implicit (loaded by the caller).  */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
/* Quad-precision compare-with-exception for %fcc[fccno]; operands
   implicit (loaded by the caller).  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1318 #else
1320 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1322 gen_helper_fcmps(r_rs1, r_rs2);
1325 static inline void gen_op_fcmpd(int fccno)
1327 gen_helper_fcmpd();
1330 static inline void gen_op_fcmpq(int fccno)
1332 gen_helper_fcmpq();
1335 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1337 gen_helper_fcmpes(r_rs1, r_rs2);
1340 static inline void gen_op_fcmped(int fccno)
1342 gen_helper_fcmped();
1345 static inline void gen_op_fcmpeq(int fccno)
1347 gen_helper_fcmpeq();
1349 #endif
1351 static inline void gen_op_fpexception_im(int fsr_flags)
1353 TCGv_i32 r_const;
1355 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1356 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1357 r_const = tcg_const_i32(TT_FP_EXCP);
1358 gen_helper_raise_exception(r_const);
1359 tcg_temp_free_i32(r_const);
1362 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1364 #if !defined(CONFIG_USER_ONLY)
1365 if (!dc->fpu_enabled) {
1366 TCGv_i32 r_const;
1368 save_state(dc, r_cond);
1369 r_const = tcg_const_i32(TT_NFPU_INSN);
1370 gen_helper_raise_exception(r_const);
1371 tcg_temp_free_i32(r_const);
1372 dc->is_br = 1;
1373 return 1;
1375 #endif
1376 return 0;
1379 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1381 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat exception flags before an FP operation.  */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1389 /* asi moves */
1390 #ifdef TARGET_SPARC64
1391 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1393 int asi;
1394 TCGv_i32 r_asi;
1396 if (IS_IMM) {
1397 r_asi = tcg_temp_new_i32();
1398 tcg_gen_mov_i32(r_asi, cpu_asi);
1399 } else {
1400 asi = GET_FIELD(insn, 19, 26);
1401 r_asi = tcg_const_i32(asi);
1403 return r_asi;
1406 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1407 int sign)
1409 TCGv_i32 r_asi, r_size, r_sign;
1411 r_asi = gen_get_asi(insn, addr);
1412 r_size = tcg_const_i32(size);
1413 r_sign = tcg_const_i32(sign);
1414 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1415 tcg_temp_free_i32(r_sign);
1416 tcg_temp_free_i32(r_size);
1417 tcg_temp_free_i32(r_asi);
1420 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1422 TCGv_i32 r_asi, r_size;
1424 r_asi = gen_get_asi(insn, addr);
1425 r_size = tcg_const_i32(size);
1426 gen_helper_st_asi(addr, src, r_asi, r_size);
1427 tcg_temp_free_i32(r_size);
1428 tcg_temp_free_i32(r_asi);
1431 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1433 TCGv_i32 r_asi, r_size, r_rd;
1435 r_asi = gen_get_asi(insn, addr);
1436 r_size = tcg_const_i32(size);
1437 r_rd = tcg_const_i32(rd);
1438 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1439 tcg_temp_free_i32(r_rd);
1440 tcg_temp_free_i32(r_size);
1441 tcg_temp_free_i32(r_asi);
1444 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1446 TCGv_i32 r_asi, r_size, r_rd;
1448 r_asi = gen_get_asi(insn, addr);
1449 r_size = tcg_const_i32(size);
1450 r_rd = tcg_const_i32(rd);
1451 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1452 tcg_temp_free_i32(r_rd);
1453 tcg_temp_free_i32(r_size);
1454 tcg_temp_free_i32(r_asi);
1457 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1459 TCGv_i32 r_asi, r_size, r_sign;
1461 r_asi = gen_get_asi(insn, addr);
1462 r_size = tcg_const_i32(4);
1463 r_sign = tcg_const_i32(0);
1464 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1465 tcg_temp_free_i32(r_sign);
1466 gen_helper_st_asi(addr, dst, r_asi, r_size);
1467 tcg_temp_free_i32(r_size);
1468 tcg_temp_free_i32(r_asi);
1469 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1472 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1474 TCGv_i32 r_asi, r_rd;
1476 r_asi = gen_get_asi(insn, addr);
1477 r_rd = tcg_const_i32(rd);
1478 gen_helper_ldda_asi(addr, r_asi, r_rd);
1479 tcg_temp_free_i32(r_rd);
1480 tcg_temp_free_i32(r_asi);
1483 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1485 TCGv_i32 r_asi, r_size;
1487 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1488 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1489 r_asi = gen_get_asi(insn, addr);
1490 r_size = tcg_const_i32(8);
1491 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1492 tcg_temp_free_i32(r_size);
1493 tcg_temp_free_i32(r_asi);
1496 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1497 int rd)
1499 TCGv r_val1;
1500 TCGv_i32 r_asi;
1502 r_val1 = tcg_temp_new();
1503 gen_movl_reg_TN(rd, r_val1);
1504 r_asi = gen_get_asi(insn, addr);
1505 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1506 tcg_temp_free_i32(r_asi);
1507 tcg_temp_free(r_val1);
1510 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1511 int rd)
1513 TCGv_i32 r_asi;
1515 gen_movl_reg_TN(rd, cpu_tmp64);
1516 r_asi = gen_get_asi(insn, addr);
1517 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1518 tcg_temp_free_i32(r_asi);
1521 #elif !defined(CONFIG_USER_ONLY)
1523 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1524 int sign)
1526 TCGv_i32 r_asi, r_size, r_sign;
1528 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1529 r_size = tcg_const_i32(size);
1530 r_sign = tcg_const_i32(sign);
1531 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1532 tcg_temp_free(r_sign);
1533 tcg_temp_free(r_size);
1534 tcg_temp_free(r_asi);
1535 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1538 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1540 TCGv_i32 r_asi, r_size;
1542 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1543 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1544 r_size = tcg_const_i32(size);
1545 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1546 tcg_temp_free(r_size);
1547 tcg_temp_free(r_asi);
1550 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1552 TCGv_i32 r_asi, r_size, r_sign;
1553 TCGv_i64 r_val;
1555 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1556 r_size = tcg_const_i32(4);
1557 r_sign = tcg_const_i32(0);
1558 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1559 tcg_temp_free(r_sign);
1560 r_val = tcg_temp_new_i64();
1561 tcg_gen_extu_tl_i64(r_val, dst);
1562 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1563 tcg_temp_free_i64(r_val);
1564 tcg_temp_free(r_size);
1565 tcg_temp_free(r_asi);
1566 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1569 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1571 TCGv_i32 r_asi, r_size, r_sign;
1573 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1574 r_size = tcg_const_i32(8);
1575 r_sign = tcg_const_i32(0);
1576 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1577 tcg_temp_free(r_sign);
1578 tcg_temp_free(r_size);
1579 tcg_temp_free(r_asi);
1580 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1581 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1582 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1583 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1584 gen_movl_TN_reg(rd, hi);
1587 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1589 TCGv_i32 r_asi, r_size;
1591 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1592 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1593 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1594 r_size = tcg_const_i32(8);
1595 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1596 tcg_temp_free(r_size);
1597 tcg_temp_free(r_asi);
1599 #endif
1601 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1602 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1604 TCGv_i64 r_val;
1605 TCGv_i32 r_asi, r_size;
1607 gen_ld_asi(dst, addr, insn, 1, 0);
1609 r_val = tcg_const_i64(0xffULL);
1610 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1611 r_size = tcg_const_i32(1);
1612 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1613 tcg_temp_free_i32(r_size);
1614 tcg_temp_free_i32(r_asi);
1615 tcg_temp_free_i64(r_val);
1617 #endif
1619 static inline TCGv get_src1(unsigned int insn, TCGv def)
1621 TCGv r_rs1 = def;
1622 unsigned int rs1;
1624 rs1 = GET_FIELD(insn, 13, 17);
1625 if (rs1 == 0)
1626 r_rs1 = tcg_const_tl(0); // XXX how to free?
1627 else if (rs1 < 8)
1628 r_rs1 = cpu_gregs[rs1];
1629 else
1630 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1631 return r_rs1;
1634 static inline TCGv get_src2(unsigned int insn, TCGv def)
1636 TCGv r_rs2 = def;
1638 if (IS_IMM) { /* immediate */
1639 target_long simm;
1641 simm = GET_FIELDs(insn, 19, 31);
1642 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1643 } else { /* register */
1644 unsigned int rs2;
1646 rs2 = GET_FIELD(insn, 27, 31);
1647 if (rs2 == 0)
1648 r_rs2 = tcg_const_tl(0); // XXX how to free?
1649 else if (rs2 < 8)
1650 r_rs2 = cpu_gregs[rs2];
1651 else
1652 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1654 return r_rs2;
/* Bail out of instruction decode (via labels in disas_sparc_insn) when
   the modelled CPU lacks the required feature bit.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1664 /* before an instruction, dc->pc must be static */
1665 static void disas_sparc_insn(DisasContext * dc)
1667 unsigned int insn, opc, rs1, rs2, rd;
1668 target_long simm;
1670 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1671 tcg_gen_debug_insn_start(dc->pc);
1672 insn = ldl_code(dc->pc);
1673 opc = GET_FIELD(insn, 0, 1);
1675 rd = GET_FIELD(insn, 2, 6);
1677 cpu_src1 = tcg_temp_new(); // const
1678 cpu_src2 = tcg_temp_new(); // const
1680 switch (opc) {
1681 case 0: /* branches/sethi */
1683 unsigned int xop = GET_FIELD(insn, 7, 9);
1684 int32_t target;
1685 switch (xop) {
1686 #ifdef TARGET_SPARC64
1687 case 0x1: /* V9 BPcc */
1689 int cc;
1691 target = GET_FIELD_SP(insn, 0, 18);
1692 target = sign_extend(target, 18);
1693 target <<= 2;
1694 cc = GET_FIELD_SP(insn, 20, 21);
1695 if (cc == 0)
1696 do_branch(dc, target, insn, 0, cpu_cond);
1697 else if (cc == 2)
1698 do_branch(dc, target, insn, 1, cpu_cond);
1699 else
1700 goto illegal_insn;
1701 goto jmp_insn;
1703 case 0x3: /* V9 BPr */
1705 target = GET_FIELD_SP(insn, 0, 13) |
1706 (GET_FIELD_SP(insn, 20, 21) << 14);
1707 target = sign_extend(target, 16);
1708 target <<= 2;
1709 cpu_src1 = get_src1(insn, cpu_src1);
1710 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1711 goto jmp_insn;
1713 case 0x5: /* V9 FBPcc */
1715 int cc = GET_FIELD_SP(insn, 20, 21);
1716 if (gen_trap_ifnofpu(dc, cpu_cond))
1717 goto jmp_insn;
1718 target = GET_FIELD_SP(insn, 0, 18);
1719 target = sign_extend(target, 19);
1720 target <<= 2;
1721 do_fbranch(dc, target, insn, cc, cpu_cond);
1722 goto jmp_insn;
1724 #else
1725 case 0x7: /* CBN+x */
1727 goto ncp_insn;
1729 #endif
1730 case 0x2: /* BN+x */
1732 target = GET_FIELD(insn, 10, 31);
1733 target = sign_extend(target, 22);
1734 target <<= 2;
1735 do_branch(dc, target, insn, 0, cpu_cond);
1736 goto jmp_insn;
1738 case 0x6: /* FBN+x */
1740 if (gen_trap_ifnofpu(dc, cpu_cond))
1741 goto jmp_insn;
1742 target = GET_FIELD(insn, 10, 31);
1743 target = sign_extend(target, 22);
1744 target <<= 2;
1745 do_fbranch(dc, target, insn, 0, cpu_cond);
1746 goto jmp_insn;
1748 case 0x4: /* SETHI */
1749 if (rd) { // nop
1750 uint32_t value = GET_FIELD(insn, 10, 31);
1751 TCGv r_const;
1753 r_const = tcg_const_tl(value << 10);
1754 gen_movl_TN_reg(rd, r_const);
1755 tcg_temp_free(r_const);
1757 break;
1758 case 0x0: /* UNIMPL */
1759 default:
1760 goto illegal_insn;
1762 break;
1764 break;
1765 case 1: /*CALL*/
1767 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1768 TCGv r_const;
1770 r_const = tcg_const_tl(dc->pc);
1771 gen_movl_TN_reg(15, r_const);
1772 tcg_temp_free(r_const);
1773 target += dc->pc;
1774 gen_mov_pc_npc(dc, cpu_cond);
1775 dc->npc = target;
1777 goto jmp_insn;
1778 case 2: /* FPU & Logical Operations */
1780 unsigned int xop = GET_FIELD(insn, 7, 12);
1781 if (xop == 0x3a) { /* generate trap */
1782 int cond;
1784 cpu_src1 = get_src1(insn, cpu_src1);
1785 if (IS_IMM) {
1786 rs2 = GET_FIELD(insn, 25, 31);
1787 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1788 } else {
1789 rs2 = GET_FIELD(insn, 27, 31);
1790 if (rs2 != 0) {
1791 gen_movl_reg_TN(rs2, cpu_src2);
1792 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1793 } else
1794 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1796 cond = GET_FIELD(insn, 3, 6);
1797 if (cond == 0x8) {
1798 save_state(dc, cpu_cond);
1799 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1800 supervisor(dc))
1801 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1802 else
1803 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1804 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1805 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1806 gen_helper_raise_exception(cpu_tmp32);
1807 } else if (cond != 0) {
1808 TCGv r_cond = tcg_temp_new();
1809 int l1;
1810 #ifdef TARGET_SPARC64
1811 /* V9 icc/xcc */
1812 int cc = GET_FIELD_SP(insn, 11, 12);
1814 save_state(dc, cpu_cond);
1815 if (cc == 0)
1816 gen_cond(r_cond, 0, cond, dc);
1817 else if (cc == 2)
1818 gen_cond(r_cond, 1, cond, dc);
1819 else
1820 goto illegal_insn;
1821 #else
1822 save_state(dc, cpu_cond);
1823 gen_cond(r_cond, 0, cond, dc);
1824 #endif
1825 l1 = gen_new_label();
1826 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1828 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1829 supervisor(dc))
1830 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1831 else
1832 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1833 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1834 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1835 gen_helper_raise_exception(cpu_tmp32);
1837 gen_set_label(l1);
1838 tcg_temp_free(r_cond);
1840 gen_op_next_insn();
1841 tcg_gen_exit_tb(0);
1842 dc->is_br = 1;
1843 goto jmp_insn;
1844 } else if (xop == 0x28) {
1845 rs1 = GET_FIELD(insn, 13, 17);
1846 switch(rs1) {
1847 case 0: /* rdy */
1848 #ifndef TARGET_SPARC64
1849 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1850 manual, rdy on the microSPARC
1851 II */
1852 case 0x0f: /* stbar in the SPARCv8 manual,
1853 rdy on the microSPARC II */
1854 case 0x10 ... 0x1f: /* implementation-dependent in the
1855 SPARCv8 manual, rdy on the
1856 microSPARC II */
1857 #endif
1858 gen_movl_TN_reg(rd, cpu_y);
1859 break;
1860 #ifdef TARGET_SPARC64
1861 case 0x2: /* V9 rdccr */
1862 gen_helper_compute_psr();
1863 gen_helper_rdccr(cpu_dst);
1864 gen_movl_TN_reg(rd, cpu_dst);
1865 break;
1866 case 0x3: /* V9 rdasi */
1867 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1868 gen_movl_TN_reg(rd, cpu_dst);
1869 break;
1870 case 0x4: /* V9 rdtick */
1872 TCGv_ptr r_tickptr;
1874 r_tickptr = tcg_temp_new_ptr();
1875 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1876 offsetof(CPUState, tick));
1877 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1878 tcg_temp_free_ptr(r_tickptr);
1879 gen_movl_TN_reg(rd, cpu_dst);
1881 break;
1882 case 0x5: /* V9 rdpc */
1884 TCGv r_const;
1886 r_const = tcg_const_tl(dc->pc);
1887 gen_movl_TN_reg(rd, r_const);
1888 tcg_temp_free(r_const);
1890 break;
1891 case 0x6: /* V9 rdfprs */
1892 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1893 gen_movl_TN_reg(rd, cpu_dst);
1894 break;
1895 case 0xf: /* V9 membar */
1896 break; /* no effect */
1897 case 0x13: /* Graphics Status */
1898 if (gen_trap_ifnofpu(dc, cpu_cond))
1899 goto jmp_insn;
1900 gen_movl_TN_reg(rd, cpu_gsr);
1901 break;
1902 case 0x16: /* Softint */
1903 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1904 gen_movl_TN_reg(rd, cpu_dst);
1905 break;
1906 case 0x17: /* Tick compare */
1907 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1908 break;
1909 case 0x18: /* System tick */
1911 TCGv_ptr r_tickptr;
1913 r_tickptr = tcg_temp_new_ptr();
1914 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1915 offsetof(CPUState, stick));
1916 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1917 tcg_temp_free_ptr(r_tickptr);
1918 gen_movl_TN_reg(rd, cpu_dst);
1920 break;
1921 case 0x19: /* System tick compare */
1922 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1923 break;
1924 case 0x10: /* Performance Control */
1925 case 0x11: /* Performance Instrumentation Counter */
1926 case 0x12: /* Dispatch Control */
1927 case 0x14: /* Softint set, WO */
1928 case 0x15: /* Softint clear, WO */
1929 #endif
1930 default:
1931 goto illegal_insn;
1933 #if !defined(CONFIG_USER_ONLY)
1934 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1935 #ifndef TARGET_SPARC64
1936 if (!supervisor(dc))
1937 goto priv_insn;
1938 gen_helper_compute_psr();
1939 dc->cc_op = CC_OP_FLAGS;
1940 gen_helper_rdpsr(cpu_dst);
1941 #else
1942 CHECK_IU_FEATURE(dc, HYPV);
1943 if (!hypervisor(dc))
1944 goto priv_insn;
1945 rs1 = GET_FIELD(insn, 13, 17);
1946 switch (rs1) {
1947 case 0: // hpstate
1948 // gen_op_rdhpstate();
1949 break;
1950 case 1: // htstate
1951 // gen_op_rdhtstate();
1952 break;
1953 case 3: // hintp
1954 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1955 break;
1956 case 5: // htba
1957 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1958 break;
1959 case 6: // hver
1960 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1961 break;
1962 case 31: // hstick_cmpr
1963 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
1964 break;
1965 default:
1966 goto illegal_insn;
1968 #endif
1969 gen_movl_TN_reg(rd, cpu_dst);
1970 break;
1971 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1972 if (!supervisor(dc))
1973 goto priv_insn;
1974 #ifdef TARGET_SPARC64
1975 rs1 = GET_FIELD(insn, 13, 17);
1976 switch (rs1) {
1977 case 0: // tpc
1979 TCGv_ptr r_tsptr;
1981 r_tsptr = tcg_temp_new_ptr();
1982 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1983 offsetof(CPUState, tsptr));
1984 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
1985 offsetof(trap_state, tpc));
1986 tcg_temp_free_ptr(r_tsptr);
1988 break;
1989 case 1: // tnpc
1991 TCGv_ptr r_tsptr;
1993 r_tsptr = tcg_temp_new_ptr();
1994 tcg_gen_ld_ptr(r_tsptr, cpu_env,
1995 offsetof(CPUState, tsptr));
1996 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
1997 offsetof(trap_state, tnpc));
1998 tcg_temp_free_ptr(r_tsptr);
2000 break;
2001 case 2: // tstate
2003 TCGv_ptr r_tsptr;
2005 r_tsptr = tcg_temp_new_ptr();
2006 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2007 offsetof(CPUState, tsptr));
2008 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2009 offsetof(trap_state, tstate));
2010 tcg_temp_free_ptr(r_tsptr);
2012 break;
2013 case 3: // tt
2015 TCGv_ptr r_tsptr;
2017 r_tsptr = tcg_temp_new_ptr();
2018 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2019 offsetof(CPUState, tsptr));
2020 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2021 offsetof(trap_state, tt));
2022 tcg_temp_free_ptr(r_tsptr);
2023 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2025 break;
2026 case 4: // tick
2028 TCGv_ptr r_tickptr;
2030 r_tickptr = tcg_temp_new_ptr();
2031 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2032 offsetof(CPUState, tick));
2033 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2034 gen_movl_TN_reg(rd, cpu_tmp0);
2035 tcg_temp_free_ptr(r_tickptr);
2037 break;
2038 case 5: // tba
2039 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2040 break;
2041 case 6: // pstate
2042 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2043 offsetof(CPUSPARCState, pstate));
2044 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2045 break;
2046 case 7: // tl
2047 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2048 offsetof(CPUSPARCState, tl));
2049 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2050 break;
2051 case 8: // pil
2052 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2053 offsetof(CPUSPARCState, psrpil));
2054 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2055 break;
2056 case 9: // cwp
2057 gen_helper_rdcwp(cpu_tmp0);
2058 break;
2059 case 10: // cansave
2060 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2061 offsetof(CPUSPARCState, cansave));
2062 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2063 break;
2064 case 11: // canrestore
2065 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2066 offsetof(CPUSPARCState, canrestore));
2067 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2068 break;
2069 case 12: // cleanwin
2070 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2071 offsetof(CPUSPARCState, cleanwin));
2072 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2073 break;
2074 case 13: // otherwin
2075 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2076 offsetof(CPUSPARCState, otherwin));
2077 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2078 break;
2079 case 14: // wstate
2080 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2081 offsetof(CPUSPARCState, wstate));
2082 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2083 break;
2084 case 16: // UA2005 gl
2085 CHECK_IU_FEATURE(dc, GL);
2086 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2087 offsetof(CPUSPARCState, gl));
2088 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2089 break;
2090 case 26: // UA2005 strand status
2091 CHECK_IU_FEATURE(dc, HYPV);
2092 if (!hypervisor(dc))
2093 goto priv_insn;
2094 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2095 break;
2096 case 31: // ver
2097 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2098 break;
2099 case 15: // fq
2100 default:
2101 goto illegal_insn;
2103 #else
2104 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2105 #endif
2106 gen_movl_TN_reg(rd, cpu_tmp0);
2107 break;
2108 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2109 #ifdef TARGET_SPARC64
2110 save_state(dc, cpu_cond);
2111 gen_helper_flushw();
2112 #else
2113 if (!supervisor(dc))
2114 goto priv_insn;
2115 gen_movl_TN_reg(rd, cpu_tbr);
2116 #endif
2117 break;
2118 #endif
2119 } else if (xop == 0x34) { /* FPU Operations */
2120 if (gen_trap_ifnofpu(dc, cpu_cond))
2121 goto jmp_insn;
2122 gen_op_clear_ieee_excp_and_FTT();
2123 rs1 = GET_FIELD(insn, 13, 17);
2124 rs2 = GET_FIELD(insn, 27, 31);
2125 xop = GET_FIELD(insn, 18, 26);
2126 switch (xop) {
2127 case 0x1: /* fmovs */
2128 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2129 break;
2130 case 0x5: /* fnegs */
2131 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2132 break;
2133 case 0x9: /* fabss */
2134 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2135 break;
2136 case 0x29: /* fsqrts */
2137 CHECK_FPU_FEATURE(dc, FSQRT);
2138 gen_clear_float_exceptions();
2139 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2140 gen_helper_check_ieee_exceptions();
2141 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2142 break;
2143 case 0x2a: /* fsqrtd */
2144 CHECK_FPU_FEATURE(dc, FSQRT);
2145 gen_op_load_fpr_DT1(DFPREG(rs2));
2146 gen_clear_float_exceptions();
2147 gen_helper_fsqrtd();
2148 gen_helper_check_ieee_exceptions();
2149 gen_op_store_DT0_fpr(DFPREG(rd));
2150 break;
2151 case 0x2b: /* fsqrtq */
2152 CHECK_FPU_FEATURE(dc, FLOAT128);
2153 gen_op_load_fpr_QT1(QFPREG(rs2));
2154 gen_clear_float_exceptions();
2155 gen_helper_fsqrtq();
2156 gen_helper_check_ieee_exceptions();
2157 gen_op_store_QT0_fpr(QFPREG(rd));
2158 break;
2159 case 0x41: /* fadds */
2160 gen_clear_float_exceptions();
2161 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2162 gen_helper_check_ieee_exceptions();
2163 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2164 break;
2165 case 0x42: /* faddd */
2166 gen_op_load_fpr_DT0(DFPREG(rs1));
2167 gen_op_load_fpr_DT1(DFPREG(rs2));
2168 gen_clear_float_exceptions();
2169 gen_helper_faddd();
2170 gen_helper_check_ieee_exceptions();
2171 gen_op_store_DT0_fpr(DFPREG(rd));
2172 break;
2173 case 0x43: /* faddq */
2174 CHECK_FPU_FEATURE(dc, FLOAT128);
2175 gen_op_load_fpr_QT0(QFPREG(rs1));
2176 gen_op_load_fpr_QT1(QFPREG(rs2));
2177 gen_clear_float_exceptions();
2178 gen_helper_faddq();
2179 gen_helper_check_ieee_exceptions();
2180 gen_op_store_QT0_fpr(QFPREG(rd));
2181 break;
2182 case 0x45: /* fsubs */
2183 gen_clear_float_exceptions();
2184 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2185 gen_helper_check_ieee_exceptions();
2186 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2187 break;
2188 case 0x46: /* fsubd */
2189 gen_op_load_fpr_DT0(DFPREG(rs1));
2190 gen_op_load_fpr_DT1(DFPREG(rs2));
2191 gen_clear_float_exceptions();
2192 gen_helper_fsubd();
2193 gen_helper_check_ieee_exceptions();
2194 gen_op_store_DT0_fpr(DFPREG(rd));
2195 break;
2196 case 0x47: /* fsubq */
2197 CHECK_FPU_FEATURE(dc, FLOAT128);
2198 gen_op_load_fpr_QT0(QFPREG(rs1));
2199 gen_op_load_fpr_QT1(QFPREG(rs2));
2200 gen_clear_float_exceptions();
2201 gen_helper_fsubq();
2202 gen_helper_check_ieee_exceptions();
2203 gen_op_store_QT0_fpr(QFPREG(rd));
2204 break;
2205 case 0x49: /* fmuls */
2206 CHECK_FPU_FEATURE(dc, FMUL);
2207 gen_clear_float_exceptions();
2208 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2209 gen_helper_check_ieee_exceptions();
2210 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2211 break;
2212 case 0x4a: /* fmuld */
2213 CHECK_FPU_FEATURE(dc, FMUL);
2214 gen_op_load_fpr_DT0(DFPREG(rs1));
2215 gen_op_load_fpr_DT1(DFPREG(rs2));
2216 gen_clear_float_exceptions();
2217 gen_helper_fmuld();
2218 gen_helper_check_ieee_exceptions();
2219 gen_op_store_DT0_fpr(DFPREG(rd));
2220 break;
2221 case 0x4b: /* fmulq */
2222 CHECK_FPU_FEATURE(dc, FLOAT128);
2223 CHECK_FPU_FEATURE(dc, FMUL);
2224 gen_op_load_fpr_QT0(QFPREG(rs1));
2225 gen_op_load_fpr_QT1(QFPREG(rs2));
2226 gen_clear_float_exceptions();
2227 gen_helper_fmulq();
2228 gen_helper_check_ieee_exceptions();
2229 gen_op_store_QT0_fpr(QFPREG(rd));
2230 break;
2231 case 0x4d: /* fdivs */
2232 gen_clear_float_exceptions();
2233 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2234 gen_helper_check_ieee_exceptions();
2235 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2236 break;
2237 case 0x4e: /* fdivd */
2238 gen_op_load_fpr_DT0(DFPREG(rs1));
2239 gen_op_load_fpr_DT1(DFPREG(rs2));
2240 gen_clear_float_exceptions();
2241 gen_helper_fdivd();
2242 gen_helper_check_ieee_exceptions();
2243 gen_op_store_DT0_fpr(DFPREG(rd));
2244 break;
2245 case 0x4f: /* fdivq */
2246 CHECK_FPU_FEATURE(dc, FLOAT128);
2247 gen_op_load_fpr_QT0(QFPREG(rs1));
2248 gen_op_load_fpr_QT1(QFPREG(rs2));
2249 gen_clear_float_exceptions();
2250 gen_helper_fdivq();
2251 gen_helper_check_ieee_exceptions();
2252 gen_op_store_QT0_fpr(QFPREG(rd));
2253 break;
2254 case 0x69: /* fsmuld */
2255 CHECK_FPU_FEATURE(dc, FSMULD);
2256 gen_clear_float_exceptions();
2257 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2258 gen_helper_check_ieee_exceptions();
2259 gen_op_store_DT0_fpr(DFPREG(rd));
2260 break;
2261 case 0x6e: /* fdmulq */
2262 CHECK_FPU_FEATURE(dc, FLOAT128);
2263 gen_op_load_fpr_DT0(DFPREG(rs1));
2264 gen_op_load_fpr_DT1(DFPREG(rs2));
2265 gen_clear_float_exceptions();
2266 gen_helper_fdmulq();
2267 gen_helper_check_ieee_exceptions();
2268 gen_op_store_QT0_fpr(QFPREG(rd));
2269 break;
2270 case 0xc4: /* fitos */
2271 gen_clear_float_exceptions();
2272 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2273 gen_helper_check_ieee_exceptions();
2274 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2275 break;
2276 case 0xc6: /* fdtos */
2277 gen_op_load_fpr_DT1(DFPREG(rs2));
2278 gen_clear_float_exceptions();
2279 gen_helper_fdtos(cpu_tmp32);
2280 gen_helper_check_ieee_exceptions();
2281 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2282 break;
2283 case 0xc7: /* fqtos */
2284 CHECK_FPU_FEATURE(dc, FLOAT128);
2285 gen_op_load_fpr_QT1(QFPREG(rs2));
2286 gen_clear_float_exceptions();
2287 gen_helper_fqtos(cpu_tmp32);
2288 gen_helper_check_ieee_exceptions();
2289 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2290 break;
2291 case 0xc8: /* fitod */
2292 gen_helper_fitod(cpu_fpr[rs2]);
2293 gen_op_store_DT0_fpr(DFPREG(rd));
2294 break;
2295 case 0xc9: /* fstod */
2296 gen_helper_fstod(cpu_fpr[rs2]);
2297 gen_op_store_DT0_fpr(DFPREG(rd));
2298 break;
2299 case 0xcb: /* fqtod */
2300 CHECK_FPU_FEATURE(dc, FLOAT128);
2301 gen_op_load_fpr_QT1(QFPREG(rs2));
2302 gen_clear_float_exceptions();
2303 gen_helper_fqtod();
2304 gen_helper_check_ieee_exceptions();
2305 gen_op_store_DT0_fpr(DFPREG(rd));
2306 break;
2307 case 0xcc: /* fitoq */
2308 CHECK_FPU_FEATURE(dc, FLOAT128);
2309 gen_helper_fitoq(cpu_fpr[rs2]);
2310 gen_op_store_QT0_fpr(QFPREG(rd));
2311 break;
2312 case 0xcd: /* fstoq */
2313 CHECK_FPU_FEATURE(dc, FLOAT128);
2314 gen_helper_fstoq(cpu_fpr[rs2]);
2315 gen_op_store_QT0_fpr(QFPREG(rd));
2316 break;
2317 case 0xce: /* fdtoq */
2318 CHECK_FPU_FEATURE(dc, FLOAT128);
2319 gen_op_load_fpr_DT1(DFPREG(rs2));
2320 gen_helper_fdtoq();
2321 gen_op_store_QT0_fpr(QFPREG(rd));
2322 break;
2323 case 0xd1: /* fstoi */
2324 gen_clear_float_exceptions();
2325 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2326 gen_helper_check_ieee_exceptions();
2327 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2328 break;
2329 case 0xd2: /* fdtoi */
2330 gen_op_load_fpr_DT1(DFPREG(rs2));
2331 gen_clear_float_exceptions();
2332 gen_helper_fdtoi(cpu_tmp32);
2333 gen_helper_check_ieee_exceptions();
2334 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2335 break;
2336 case 0xd3: /* fqtoi */
2337 CHECK_FPU_FEATURE(dc, FLOAT128);
2338 gen_op_load_fpr_QT1(QFPREG(rs2));
2339 gen_clear_float_exceptions();
2340 gen_helper_fqtoi(cpu_tmp32);
2341 gen_helper_check_ieee_exceptions();
2342 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2343 break;
2344 #ifdef TARGET_SPARC64
2345 case 0x2: /* V9 fmovd */
2346 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2347 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2348 cpu_fpr[DFPREG(rs2) + 1]);
2349 break;
2350 case 0x3: /* V9 fmovq */
2351 CHECK_FPU_FEATURE(dc, FLOAT128);
2352 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2353 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2354 cpu_fpr[QFPREG(rs2) + 1]);
2355 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2356 cpu_fpr[QFPREG(rs2) + 2]);
2357 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2358 cpu_fpr[QFPREG(rs2) + 3]);
2359 break;
2360 case 0x6: /* V9 fnegd */
2361 gen_op_load_fpr_DT1(DFPREG(rs2));
2362 gen_helper_fnegd();
2363 gen_op_store_DT0_fpr(DFPREG(rd));
2364 break;
2365 case 0x7: /* V9 fnegq */
2366 CHECK_FPU_FEATURE(dc, FLOAT128);
2367 gen_op_load_fpr_QT1(QFPREG(rs2));
2368 gen_helper_fnegq();
2369 gen_op_store_QT0_fpr(QFPREG(rd));
2370 break;
2371 case 0xa: /* V9 fabsd */
2372 gen_op_load_fpr_DT1(DFPREG(rs2));
2373 gen_helper_fabsd();
2374 gen_op_store_DT0_fpr(DFPREG(rd));
2375 break;
2376 case 0xb: /* V9 fabsq */
2377 CHECK_FPU_FEATURE(dc, FLOAT128);
2378 gen_op_load_fpr_QT1(QFPREG(rs2));
2379 gen_helper_fabsq();
2380 gen_op_store_QT0_fpr(QFPREG(rd));
2381 break;
2382 case 0x81: /* V9 fstox */
2383 gen_clear_float_exceptions();
2384 gen_helper_fstox(cpu_fpr[rs2]);
2385 gen_helper_check_ieee_exceptions();
2386 gen_op_store_DT0_fpr(DFPREG(rd));
2387 break;
2388 case 0x82: /* V9 fdtox */
2389 gen_op_load_fpr_DT1(DFPREG(rs2));
2390 gen_clear_float_exceptions();
2391 gen_helper_fdtox();
2392 gen_helper_check_ieee_exceptions();
2393 gen_op_store_DT0_fpr(DFPREG(rd));
2394 break;
2395 case 0x83: /* V9 fqtox */
2396 CHECK_FPU_FEATURE(dc, FLOAT128);
2397 gen_op_load_fpr_QT1(QFPREG(rs2));
2398 gen_clear_float_exceptions();
2399 gen_helper_fqtox();
2400 gen_helper_check_ieee_exceptions();
2401 gen_op_store_DT0_fpr(DFPREG(rd));
2402 break;
2403 case 0x84: /* V9 fxtos */
2404 gen_op_load_fpr_DT1(DFPREG(rs2));
2405 gen_clear_float_exceptions();
2406 gen_helper_fxtos(cpu_tmp32);
2407 gen_helper_check_ieee_exceptions();
2408 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2409 break;
2410 case 0x88: /* V9 fxtod */
2411 gen_op_load_fpr_DT1(DFPREG(rs2));
2412 gen_clear_float_exceptions();
2413 gen_helper_fxtod();
2414 gen_helper_check_ieee_exceptions();
2415 gen_op_store_DT0_fpr(DFPREG(rd));
2416 break;
2417 case 0x8c: /* V9 fxtoq */
2418 CHECK_FPU_FEATURE(dc, FLOAT128);
2419 gen_op_load_fpr_DT1(DFPREG(rs2));
2420 gen_clear_float_exceptions();
2421 gen_helper_fxtoq();
2422 gen_helper_check_ieee_exceptions();
2423 gen_op_store_QT0_fpr(QFPREG(rd));
2424 break;
2425 #endif
2426 default:
2427 goto illegal_insn;
2429 } else if (xop == 0x35) { /* FPU Operations */
2430 #ifdef TARGET_SPARC64
2431 int cond;
2432 #endif
2433 if (gen_trap_ifnofpu(dc, cpu_cond))
2434 goto jmp_insn;
2435 gen_op_clear_ieee_excp_and_FTT();
2436 rs1 = GET_FIELD(insn, 13, 17);
2437 rs2 = GET_FIELD(insn, 27, 31);
2438 xop = GET_FIELD(insn, 18, 26);
2439 #ifdef TARGET_SPARC64
2440 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2441 int l1;
2443 l1 = gen_new_label();
2444 cond = GET_FIELD_SP(insn, 14, 17);
2445 cpu_src1 = get_src1(insn, cpu_src1);
2446 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2447 0, l1);
2448 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2449 gen_set_label(l1);
2450 break;
2451 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2452 int l1;
2454 l1 = gen_new_label();
2455 cond = GET_FIELD_SP(insn, 14, 17);
2456 cpu_src1 = get_src1(insn, cpu_src1);
2457 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2458 0, l1);
2459 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2460 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2461 gen_set_label(l1);
2462 break;
2463 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2464 int l1;
2466 CHECK_FPU_FEATURE(dc, FLOAT128);
2467 l1 = gen_new_label();
2468 cond = GET_FIELD_SP(insn, 14, 17);
2469 cpu_src1 = get_src1(insn, cpu_src1);
2470 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2471 0, l1);
2472 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2473 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2474 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2475 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2476 gen_set_label(l1);
2477 break;
2479 #endif
2480 switch (xop) {
2481 #ifdef TARGET_SPARC64
2482 #define FMOVSCC(fcc) \
2484 TCGv r_cond; \
2485 int l1; \
2487 l1 = gen_new_label(); \
2488 r_cond = tcg_temp_new(); \
2489 cond = GET_FIELD_SP(insn, 14, 17); \
2490 gen_fcond(r_cond, fcc, cond); \
2491 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2492 0, l1); \
2493 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2494 gen_set_label(l1); \
2495 tcg_temp_free(r_cond); \
2497 #define FMOVDCC(fcc) \
2499 TCGv r_cond; \
2500 int l1; \
2502 l1 = gen_new_label(); \
2503 r_cond = tcg_temp_new(); \
2504 cond = GET_FIELD_SP(insn, 14, 17); \
2505 gen_fcond(r_cond, fcc, cond); \
2506 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2507 0, l1); \
2508 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2509 cpu_fpr[DFPREG(rs2)]); \
2510 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2511 cpu_fpr[DFPREG(rs2) + 1]); \
2512 gen_set_label(l1); \
2513 tcg_temp_free(r_cond); \
2515 #define FMOVQCC(fcc) \
2517 TCGv r_cond; \
2518 int l1; \
2520 l1 = gen_new_label(); \
2521 r_cond = tcg_temp_new(); \
2522 cond = GET_FIELD_SP(insn, 14, 17); \
2523 gen_fcond(r_cond, fcc, cond); \
2524 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2525 0, l1); \
2526 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2527 cpu_fpr[QFPREG(rs2)]); \
2528 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2529 cpu_fpr[QFPREG(rs2) + 1]); \
2530 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2531 cpu_fpr[QFPREG(rs2) + 2]); \
2532 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2533 cpu_fpr[QFPREG(rs2) + 3]); \
2534 gen_set_label(l1); \
2535 tcg_temp_free(r_cond); \
2537 case 0x001: /* V9 fmovscc %fcc0 */
2538 FMOVSCC(0);
2539 break;
2540 case 0x002: /* V9 fmovdcc %fcc0 */
2541 FMOVDCC(0);
2542 break;
2543 case 0x003: /* V9 fmovqcc %fcc0 */
2544 CHECK_FPU_FEATURE(dc, FLOAT128);
2545 FMOVQCC(0);
2546 break;
2547 case 0x041: /* V9 fmovscc %fcc1 */
2548 FMOVSCC(1);
2549 break;
2550 case 0x042: /* V9 fmovdcc %fcc1 */
2551 FMOVDCC(1);
2552 break;
2553 case 0x043: /* V9 fmovqcc %fcc1 */
2554 CHECK_FPU_FEATURE(dc, FLOAT128);
2555 FMOVQCC(1);
2556 break;
2557 case 0x081: /* V9 fmovscc %fcc2 */
2558 FMOVSCC(2);
2559 break;
2560 case 0x082: /* V9 fmovdcc %fcc2 */
2561 FMOVDCC(2);
2562 break;
2563 case 0x083: /* V9 fmovqcc %fcc2 */
2564 CHECK_FPU_FEATURE(dc, FLOAT128);
2565 FMOVQCC(2);
2566 break;
2567 case 0x0c1: /* V9 fmovscc %fcc3 */
2568 FMOVSCC(3);
2569 break;
2570 case 0x0c2: /* V9 fmovdcc %fcc3 */
2571 FMOVDCC(3);
2572 break;
2573 case 0x0c3: /* V9 fmovqcc %fcc3 */
2574 CHECK_FPU_FEATURE(dc, FLOAT128);
2575 FMOVQCC(3);
2576 break;
2577 #undef FMOVSCC
2578 #undef FMOVDCC
2579 #undef FMOVQCC
2580 #define FMOVSCC(icc) \
2582 TCGv r_cond; \
2583 int l1; \
2585 l1 = gen_new_label(); \
2586 r_cond = tcg_temp_new(); \
2587 cond = GET_FIELD_SP(insn, 14, 17); \
2588 gen_cond(r_cond, icc, cond, dc); \
2589 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2590 0, l1); \
2591 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2592 gen_set_label(l1); \
2593 tcg_temp_free(r_cond); \
2595 #define FMOVDCC(icc) \
2597 TCGv r_cond; \
2598 int l1; \
2600 l1 = gen_new_label(); \
2601 r_cond = tcg_temp_new(); \
2602 cond = GET_FIELD_SP(insn, 14, 17); \
2603 gen_cond(r_cond, icc, cond, dc); \
2604 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2605 0, l1); \
2606 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2607 cpu_fpr[DFPREG(rs2)]); \
2608 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2609 cpu_fpr[DFPREG(rs2) + 1]); \
2610 gen_set_label(l1); \
2611 tcg_temp_free(r_cond); \
2613 #define FMOVQCC(icc) \
2615 TCGv r_cond; \
2616 int l1; \
2618 l1 = gen_new_label(); \
2619 r_cond = tcg_temp_new(); \
2620 cond = GET_FIELD_SP(insn, 14, 17); \
2621 gen_cond(r_cond, icc, cond, dc); \
2622 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2623 0, l1); \
2624 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2625 cpu_fpr[QFPREG(rs2)]); \
2626 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2627 cpu_fpr[QFPREG(rs2) + 1]); \
2628 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2629 cpu_fpr[QFPREG(rs2) + 2]); \
2630 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2631 cpu_fpr[QFPREG(rs2) + 3]); \
2632 gen_set_label(l1); \
2633 tcg_temp_free(r_cond); \
2636 case 0x101: /* V9 fmovscc %icc */
2637 FMOVSCC(0);
2638 break;
2639 case 0x102: /* V9 fmovdcc %icc */
2640 FMOVDCC(0);
2641 case 0x103: /* V9 fmovqcc %icc */
2642 CHECK_FPU_FEATURE(dc, FLOAT128);
2643 FMOVQCC(0);
2644 break;
2645 case 0x181: /* V9 fmovscc %xcc */
2646 FMOVSCC(1);
2647 break;
2648 case 0x182: /* V9 fmovdcc %xcc */
2649 FMOVDCC(1);
2650 break;
2651 case 0x183: /* V9 fmovqcc %xcc */
2652 CHECK_FPU_FEATURE(dc, FLOAT128);
2653 FMOVQCC(1);
2654 break;
2655 #undef FMOVSCC
2656 #undef FMOVDCC
2657 #undef FMOVQCC
2658 #endif
2659 case 0x51: /* fcmps, V9 %fcc */
2660 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2661 break;
2662 case 0x52: /* fcmpd, V9 %fcc */
2663 gen_op_load_fpr_DT0(DFPREG(rs1));
2664 gen_op_load_fpr_DT1(DFPREG(rs2));
2665 gen_op_fcmpd(rd & 3);
2666 break;
2667 case 0x53: /* fcmpq, V9 %fcc */
2668 CHECK_FPU_FEATURE(dc, FLOAT128);
2669 gen_op_load_fpr_QT0(QFPREG(rs1));
2670 gen_op_load_fpr_QT1(QFPREG(rs2));
2671 gen_op_fcmpq(rd & 3);
2672 break;
2673 case 0x55: /* fcmpes, V9 %fcc */
2674 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2675 break;
2676 case 0x56: /* fcmped, V9 %fcc */
2677 gen_op_load_fpr_DT0(DFPREG(rs1));
2678 gen_op_load_fpr_DT1(DFPREG(rs2));
2679 gen_op_fcmped(rd & 3);
2680 break;
2681 case 0x57: /* fcmpeq, V9 %fcc */
2682 CHECK_FPU_FEATURE(dc, FLOAT128);
2683 gen_op_load_fpr_QT0(QFPREG(rs1));
2684 gen_op_load_fpr_QT1(QFPREG(rs2));
2685 gen_op_fcmpeq(rd & 3);
2686 break;
2687 default:
2688 goto illegal_insn;
2690 } else if (xop == 0x2) {
2691 // clr/mov shortcut
2693 rs1 = GET_FIELD(insn, 13, 17);
2694 if (rs1 == 0) {
2695 // or %g0, x, y -> mov T0, x; mov y, T0
2696 if (IS_IMM) { /* immediate */
2697 TCGv r_const;
2699 simm = GET_FIELDs(insn, 19, 31);
2700 r_const = tcg_const_tl(simm);
2701 gen_movl_TN_reg(rd, r_const);
2702 tcg_temp_free(r_const);
2703 } else { /* register */
2704 rs2 = GET_FIELD(insn, 27, 31);
2705 gen_movl_reg_TN(rs2, cpu_dst);
2706 gen_movl_TN_reg(rd, cpu_dst);
2708 } else {
2709 cpu_src1 = get_src1(insn, cpu_src1);
2710 if (IS_IMM) { /* immediate */
2711 simm = GET_FIELDs(insn, 19, 31);
2712 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2713 gen_movl_TN_reg(rd, cpu_dst);
2714 } else { /* register */
2715 // or x, %g0, y -> mov T1, x; mov y, T1
2716 rs2 = GET_FIELD(insn, 27, 31);
2717 if (rs2 != 0) {
2718 gen_movl_reg_TN(rs2, cpu_src2);
2719 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2720 gen_movl_TN_reg(rd, cpu_dst);
2721 } else
2722 gen_movl_TN_reg(rd, cpu_src1);
2725 #ifdef TARGET_SPARC64
2726 } else if (xop == 0x25) { /* sll, V9 sllx */
2727 cpu_src1 = get_src1(insn, cpu_src1);
2728 if (IS_IMM) { /* immediate */
2729 simm = GET_FIELDs(insn, 20, 31);
2730 if (insn & (1 << 12)) {
2731 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2732 } else {
2733 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2735 } else { /* register */
2736 rs2 = GET_FIELD(insn, 27, 31);
2737 gen_movl_reg_TN(rs2, cpu_src2);
2738 if (insn & (1 << 12)) {
2739 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2740 } else {
2741 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2743 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2745 gen_movl_TN_reg(rd, cpu_dst);
2746 } else if (xop == 0x26) { /* srl, V9 srlx */
2747 cpu_src1 = get_src1(insn, cpu_src1);
2748 if (IS_IMM) { /* immediate */
2749 simm = GET_FIELDs(insn, 20, 31);
2750 if (insn & (1 << 12)) {
2751 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2752 } else {
2753 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2754 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2756 } else { /* register */
2757 rs2 = GET_FIELD(insn, 27, 31);
2758 gen_movl_reg_TN(rs2, cpu_src2);
2759 if (insn & (1 << 12)) {
2760 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2761 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2762 } else {
2763 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2764 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2765 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2768 gen_movl_TN_reg(rd, cpu_dst);
2769 } else if (xop == 0x27) { /* sra, V9 srax */
2770 cpu_src1 = get_src1(insn, cpu_src1);
2771 if (IS_IMM) { /* immediate */
2772 simm = GET_FIELDs(insn, 20, 31);
2773 if (insn & (1 << 12)) {
2774 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2775 } else {
2776 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2777 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2778 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2780 } else { /* register */
2781 rs2 = GET_FIELD(insn, 27, 31);
2782 gen_movl_reg_TN(rs2, cpu_src2);
2783 if (insn & (1 << 12)) {
2784 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2785 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2786 } else {
2787 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2788 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2789 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2790 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2793 gen_movl_TN_reg(rd, cpu_dst);
2794 #endif
2795 } else if (xop < 0x36) {
2796 if (xop < 0x20) {
2797 cpu_src1 = get_src1(insn, cpu_src1);
2798 cpu_src2 = get_src2(insn, cpu_src2);
2799 switch (xop & ~0x10) {
2800 case 0x0: /* add */
2801 if (IS_IMM) {
2802 simm = GET_FIELDs(insn, 19, 31);
2803 if (xop & 0x10) {
2804 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2805 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2806 dc->cc_op = CC_OP_ADD;
2807 } else {
2808 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2810 } else {
2811 if (xop & 0x10) {
2812 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2813 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2814 dc->cc_op = CC_OP_ADD;
2815 } else {
2816 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2819 break;
2820 case 0x1: /* and */
2821 if (IS_IMM) {
2822 simm = GET_FIELDs(insn, 19, 31);
2823 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2824 } else {
2825 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2827 if (xop & 0x10) {
2828 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2829 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2830 dc->cc_op = CC_OP_LOGIC;
2832 break;
2833 case 0x2: /* or */
2834 if (IS_IMM) {
2835 simm = GET_FIELDs(insn, 19, 31);
2836 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2837 } else {
2838 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2840 if (xop & 0x10) {
2841 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2842 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2843 dc->cc_op = CC_OP_LOGIC;
2845 break;
2846 case 0x3: /* xor */
2847 if (IS_IMM) {
2848 simm = GET_FIELDs(insn, 19, 31);
2849 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2850 } else {
2851 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2853 if (xop & 0x10) {
2854 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2855 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2856 dc->cc_op = CC_OP_LOGIC;
2858 break;
2859 case 0x4: /* sub */
2860 if (IS_IMM) {
2861 simm = GET_FIELDs(insn, 19, 31);
2862 if (xop & 0x10) {
2863 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2864 } else {
2865 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2867 } else {
2868 if (xop & 0x10) {
2869 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2870 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2871 dc->cc_op = CC_OP_SUB;
2872 } else {
2873 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2876 break;
2877 case 0x5: /* andn */
2878 if (IS_IMM) {
2879 simm = GET_FIELDs(insn, 19, 31);
2880 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2881 } else {
2882 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2884 if (xop & 0x10) {
2885 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2886 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2887 dc->cc_op = CC_OP_LOGIC;
2889 break;
2890 case 0x6: /* orn */
2891 if (IS_IMM) {
2892 simm = GET_FIELDs(insn, 19, 31);
2893 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2894 } else {
2895 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2897 if (xop & 0x10) {
2898 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2899 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2900 dc->cc_op = CC_OP_LOGIC;
2902 break;
2903 case 0x7: /* xorn */
2904 if (IS_IMM) {
2905 simm = GET_FIELDs(insn, 19, 31);
2906 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2907 } else {
2908 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2909 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2911 if (xop & 0x10) {
2912 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2913 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2914 dc->cc_op = CC_OP_LOGIC;
2916 break;
2917 case 0x8: /* addx, V9 addc */
2918 if (IS_IMM) {
2919 simm = GET_FIELDs(insn, 19, 31);
2920 if (xop & 0x10) {
2921 gen_helper_compute_psr();
2922 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2923 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2924 dc->cc_op = CC_OP_ADDX;
2925 } else {
2926 gen_helper_compute_psr();
2927 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2928 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2929 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2931 } else {
2932 if (xop & 0x10) {
2933 gen_helper_compute_psr();
2934 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2935 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2936 dc->cc_op = CC_OP_ADDX;
2937 } else {
2938 gen_helper_compute_psr();
2939 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2940 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2941 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2944 break;
2945 #ifdef TARGET_SPARC64
2946 case 0x9: /* V9 mulx */
2947 if (IS_IMM) {
2948 simm = GET_FIELDs(insn, 19, 31);
2949 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2950 } else {
2951 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2953 break;
2954 #endif
2955 case 0xa: /* umul */
2956 CHECK_IU_FEATURE(dc, MUL);
2957 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2958 if (xop & 0x10) {
2959 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2960 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2961 dc->cc_op = CC_OP_LOGIC;
2963 break;
2964 case 0xb: /* smul */
2965 CHECK_IU_FEATURE(dc, MUL);
2966 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2967 if (xop & 0x10) {
2968 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2969 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2970 dc->cc_op = CC_OP_LOGIC;
2972 break;
2973 case 0xc: /* subx, V9 subc */
2974 if (IS_IMM) {
2975 simm = GET_FIELDs(insn, 19, 31);
2976 if (xop & 0x10) {
2977 gen_helper_compute_psr();
2978 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
2979 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
2980 dc->cc_op = CC_OP_SUBX;
2981 } else {
2982 gen_helper_compute_psr();
2983 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2984 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2985 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
2987 } else {
2988 if (xop & 0x10) {
2989 gen_helper_compute_psr();
2990 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
2991 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
2992 dc->cc_op = CC_OP_SUBX;
2993 } else {
2994 gen_helper_compute_psr();
2995 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2996 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2997 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3000 break;
3001 #ifdef TARGET_SPARC64
3002 case 0xd: /* V9 udivx */
3003 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3004 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3005 gen_trap_ifdivzero_tl(cpu_cc_src2);
3006 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3007 break;
3008 #endif
3009 case 0xe: /* udiv */
3010 CHECK_IU_FEATURE(dc, DIV);
3011 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3012 if (xop & 0x10) {
3013 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3014 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3015 dc->cc_op = CC_OP_DIV;
3017 break;
3018 case 0xf: /* sdiv */
3019 CHECK_IU_FEATURE(dc, DIV);
3020 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3021 if (xop & 0x10) {
3022 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3023 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3024 dc->cc_op = CC_OP_DIV;
3026 break;
3027 default:
3028 goto illegal_insn;
3030 gen_movl_TN_reg(rd, cpu_dst);
3031 } else {
3032 cpu_src1 = get_src1(insn, cpu_src1);
3033 cpu_src2 = get_src2(insn, cpu_src2);
3034 switch (xop) {
3035 case 0x20: /* taddcc */
3036 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3037 gen_movl_TN_reg(rd, cpu_dst);
3038 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3039 dc->cc_op = CC_OP_TADD;
3040 break;
3041 case 0x21: /* tsubcc */
3042 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3043 gen_movl_TN_reg(rd, cpu_dst);
3044 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3045 dc->cc_op = CC_OP_TSUB;
3046 break;
3047 case 0x22: /* taddcctv */
3048 save_state(dc, cpu_cond);
3049 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3050 gen_movl_TN_reg(rd, cpu_dst);
3051 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3052 dc->cc_op = CC_OP_TADDTV;
3053 break;
3054 case 0x23: /* tsubcctv */
3055 save_state(dc, cpu_cond);
3056 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3057 gen_movl_TN_reg(rd, cpu_dst);
3058 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3059 dc->cc_op = CC_OP_TSUBTV;
3060 break;
3061 case 0x24: /* mulscc */
3062 gen_helper_compute_psr();
3063 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3064 gen_movl_TN_reg(rd, cpu_dst);
3065 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3066 dc->cc_op = CC_OP_ADD;
3067 break;
3068 #ifndef TARGET_SPARC64
3069 case 0x25: /* sll */
3070 if (IS_IMM) { /* immediate */
3071 simm = GET_FIELDs(insn, 20, 31);
3072 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3073 } else { /* register */
3074 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3075 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3077 gen_movl_TN_reg(rd, cpu_dst);
3078 break;
3079 case 0x26: /* srl */
3080 if (IS_IMM) { /* immediate */
3081 simm = GET_FIELDs(insn, 20, 31);
3082 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3083 } else { /* register */
3084 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3085 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3087 gen_movl_TN_reg(rd, cpu_dst);
3088 break;
3089 case 0x27: /* sra */
3090 if (IS_IMM) { /* immediate */
3091 simm = GET_FIELDs(insn, 20, 31);
3092 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3093 } else { /* register */
3094 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3095 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3097 gen_movl_TN_reg(rd, cpu_dst);
3098 break;
3099 #endif
3100 case 0x30:
3102 switch(rd) {
3103 case 0: /* wry */
3104 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3105 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3106 break;
3107 #ifndef TARGET_SPARC64
3108 case 0x01 ... 0x0f: /* undefined in the
3109 SPARCv8 manual, nop
3110 on the microSPARC
3111 II */
3112 case 0x10 ... 0x1f: /* implementation-dependent
3113 in the SPARCv8
3114 manual, nop on the
3115 microSPARC II */
3116 break;
3117 #else
3118 case 0x2: /* V9 wrccr */
3119 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3120 gen_helper_wrccr(cpu_dst);
3121 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3122 dc->cc_op = CC_OP_FLAGS;
3123 break;
3124 case 0x3: /* V9 wrasi */
3125 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3126 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3127 break;
3128 case 0x6: /* V9 wrfprs */
3129 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3130 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3131 save_state(dc, cpu_cond);
3132 gen_op_next_insn();
3133 tcg_gen_exit_tb(0);
3134 dc->is_br = 1;
3135 break;
3136 case 0xf: /* V9 sir, nop if user */
3137 #if !defined(CONFIG_USER_ONLY)
3138 if (supervisor(dc))
3139 ; // XXX
3140 #endif
3141 break;
3142 case 0x13: /* Graphics Status */
3143 if (gen_trap_ifnofpu(dc, cpu_cond))
3144 goto jmp_insn;
3145 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3146 break;
3147 case 0x14: /* Softint set */
3148 if (!supervisor(dc))
3149 goto illegal_insn;
3150 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3151 gen_helper_set_softint(cpu_tmp64);
3152 break;
3153 case 0x15: /* Softint clear */
3154 if (!supervisor(dc))
3155 goto illegal_insn;
3156 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3157 gen_helper_clear_softint(cpu_tmp64);
3158 break;
3159 case 0x16: /* Softint write */
3160 if (!supervisor(dc))
3161 goto illegal_insn;
3162 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3163 gen_helper_write_softint(cpu_tmp64);
3164 break;
3165 case 0x17: /* Tick compare */
3166 #if !defined(CONFIG_USER_ONLY)
3167 if (!supervisor(dc))
3168 goto illegal_insn;
3169 #endif
3171 TCGv_ptr r_tickptr;
3173 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3174 cpu_src2);
3175 r_tickptr = tcg_temp_new_ptr();
3176 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3177 offsetof(CPUState, tick));
3178 gen_helper_tick_set_limit(r_tickptr,
3179 cpu_tick_cmpr);
3180 tcg_temp_free_ptr(r_tickptr);
3182 break;
3183 case 0x18: /* System tick */
3184 #if !defined(CONFIG_USER_ONLY)
3185 if (!supervisor(dc))
3186 goto illegal_insn;
3187 #endif
3189 TCGv_ptr r_tickptr;
3191 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3192 cpu_src2);
3193 r_tickptr = tcg_temp_new_ptr();
3194 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3195 offsetof(CPUState, stick));
3196 gen_helper_tick_set_count(r_tickptr,
3197 cpu_dst);
3198 tcg_temp_free_ptr(r_tickptr);
3200 break;
3201 case 0x19: /* System tick compare */
3202 #if !defined(CONFIG_USER_ONLY)
3203 if (!supervisor(dc))
3204 goto illegal_insn;
3205 #endif
3207 TCGv_ptr r_tickptr;
3209 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3210 cpu_src2);
3211 r_tickptr = tcg_temp_new_ptr();
3212 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3213 offsetof(CPUState, stick));
3214 gen_helper_tick_set_limit(r_tickptr,
3215 cpu_stick_cmpr);
3216 tcg_temp_free_ptr(r_tickptr);
3218 break;
3220 case 0x10: /* Performance Control */
3221 case 0x11: /* Performance Instrumentation
3222 Counter */
3223 case 0x12: /* Dispatch Control */
3224 #endif
3225 default:
3226 goto illegal_insn;
3229 break;
3230 #if !defined(CONFIG_USER_ONLY)
3231 case 0x31: /* wrpsr, V9 saved, restored */
3233 if (!supervisor(dc))
3234 goto priv_insn;
3235 #ifdef TARGET_SPARC64
3236 switch (rd) {
3237 case 0:
3238 gen_helper_saved();
3239 break;
3240 case 1:
3241 gen_helper_restored();
3242 break;
3243 case 2: /* UA2005 allclean */
3244 case 3: /* UA2005 otherw */
3245 case 4: /* UA2005 normalw */
3246 case 5: /* UA2005 invalw */
3247 // XXX
3248 default:
3249 goto illegal_insn;
3251 #else
3252 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3253 gen_helper_wrpsr(cpu_dst);
3254 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3255 dc->cc_op = CC_OP_FLAGS;
3256 save_state(dc, cpu_cond);
3257 gen_op_next_insn();
3258 tcg_gen_exit_tb(0);
3259 dc->is_br = 1;
3260 #endif
3262 break;
3263 case 0x32: /* wrwim, V9 wrpr */
3265 if (!supervisor(dc))
3266 goto priv_insn;
3267 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3268 #ifdef TARGET_SPARC64
3269 switch (rd) {
3270 case 0: // tpc
3272 TCGv_ptr r_tsptr;
3274 r_tsptr = tcg_temp_new_ptr();
3275 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3276 offsetof(CPUState, tsptr));
3277 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3278 offsetof(trap_state, tpc));
3279 tcg_temp_free_ptr(r_tsptr);
3281 break;
3282 case 1: // tnpc
3284 TCGv_ptr r_tsptr;
3286 r_tsptr = tcg_temp_new_ptr();
3287 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3288 offsetof(CPUState, tsptr));
3289 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3290 offsetof(trap_state, tnpc));
3291 tcg_temp_free_ptr(r_tsptr);
3293 break;
3294 case 2: // tstate
3296 TCGv_ptr r_tsptr;
3298 r_tsptr = tcg_temp_new_ptr();
3299 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3300 offsetof(CPUState, tsptr));
3301 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3302 offsetof(trap_state,
3303 tstate));
3304 tcg_temp_free_ptr(r_tsptr);
3306 break;
3307 case 3: // tt
3309 TCGv_ptr r_tsptr;
3311 r_tsptr = tcg_temp_new_ptr();
3312 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3313 offsetof(CPUState, tsptr));
3314 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3315 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3316 offsetof(trap_state, tt));
3317 tcg_temp_free_ptr(r_tsptr);
3319 break;
3320 case 4: // tick
3322 TCGv_ptr r_tickptr;
3324 r_tickptr = tcg_temp_new_ptr();
3325 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3326 offsetof(CPUState, tick));
3327 gen_helper_tick_set_count(r_tickptr,
3328 cpu_tmp0);
3329 tcg_temp_free_ptr(r_tickptr);
3331 break;
3332 case 5: // tba
3333 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3334 break;
3335 case 6: // pstate
3336 save_state(dc, cpu_cond);
3337 gen_helper_wrpstate(cpu_tmp0);
3338 gen_op_next_insn();
3339 tcg_gen_exit_tb(0);
3340 dc->is_br = 1;
3341 break;
3342 case 7: // tl
3343 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3344 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3345 offsetof(CPUSPARCState, tl));
3346 break;
3347 case 8: // pil
3348 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3349 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3350 offsetof(CPUSPARCState,
3351 psrpil));
3352 break;
3353 case 9: // cwp
3354 gen_helper_wrcwp(cpu_tmp0);
3355 break;
3356 case 10: // cansave
3357 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3358 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3359 offsetof(CPUSPARCState,
3360 cansave));
3361 break;
3362 case 11: // canrestore
3363 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3364 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3365 offsetof(CPUSPARCState,
3366 canrestore));
3367 break;
3368 case 12: // cleanwin
3369 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3370 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3371 offsetof(CPUSPARCState,
3372 cleanwin));
3373 break;
3374 case 13: // otherwin
3375 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3376 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3377 offsetof(CPUSPARCState,
3378 otherwin));
3379 break;
3380 case 14: // wstate
3381 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3382 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3383 offsetof(CPUSPARCState,
3384 wstate));
3385 break;
3386 case 16: // UA2005 gl
3387 CHECK_IU_FEATURE(dc, GL);
3388 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3389 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3390 offsetof(CPUSPARCState, gl));
3391 break;
3392 case 26: // UA2005 strand status
3393 CHECK_IU_FEATURE(dc, HYPV);
3394 if (!hypervisor(dc))
3395 goto priv_insn;
3396 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3397 break;
3398 default:
3399 goto illegal_insn;
3401 #else
3402 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3403 if (dc->def->nwindows != 32)
3404 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3405 (1 << dc->def->nwindows) - 1);
3406 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3407 #endif
3409 break;
3410 case 0x33: /* wrtbr, UA2005 wrhpr */
3412 #ifndef TARGET_SPARC64
3413 if (!supervisor(dc))
3414 goto priv_insn;
3415 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3416 #else
3417 CHECK_IU_FEATURE(dc, HYPV);
3418 if (!hypervisor(dc))
3419 goto priv_insn;
3420 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3421 switch (rd) {
3422 case 0: // hpstate
3423 // XXX gen_op_wrhpstate();
3424 save_state(dc, cpu_cond);
3425 gen_op_next_insn();
3426 tcg_gen_exit_tb(0);
3427 dc->is_br = 1;
3428 break;
3429 case 1: // htstate
3430 // XXX gen_op_wrhtstate();
3431 break;
3432 case 3: // hintp
3433 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3434 break;
3435 case 5: // htba
3436 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3437 break;
3438 case 31: // hstick_cmpr
3440 TCGv_ptr r_tickptr;
3442 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3443 r_tickptr = tcg_temp_new_ptr();
3444 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3445 offsetof(CPUState, hstick));
3446 gen_helper_tick_set_limit(r_tickptr,
3447 cpu_hstick_cmpr);
3448 tcg_temp_free_ptr(r_tickptr);
3450 break;
3451 case 6: // hver readonly
3452 default:
3453 goto illegal_insn;
3455 #endif
3457 break;
3458 #endif
3459 #ifdef TARGET_SPARC64
3460 case 0x2c: /* V9 movcc */
3462 int cc = GET_FIELD_SP(insn, 11, 12);
3463 int cond = GET_FIELD_SP(insn, 14, 17);
3464 TCGv r_cond;
3465 int l1;
3467 r_cond = tcg_temp_new();
3468 if (insn & (1 << 18)) {
3469 if (cc == 0)
3470 gen_cond(r_cond, 0, cond, dc);
3471 else if (cc == 2)
3472 gen_cond(r_cond, 1, cond, dc);
3473 else
3474 goto illegal_insn;
3475 } else {
3476 gen_fcond(r_cond, cc, cond);
3479 l1 = gen_new_label();
3481 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3482 if (IS_IMM) { /* immediate */
3483 TCGv r_const;
3485 simm = GET_FIELD_SPs(insn, 0, 10);
3486 r_const = tcg_const_tl(simm);
3487 gen_movl_TN_reg(rd, r_const);
3488 tcg_temp_free(r_const);
3489 } else {
3490 rs2 = GET_FIELD_SP(insn, 0, 4);
3491 gen_movl_reg_TN(rs2, cpu_tmp0);
3492 gen_movl_TN_reg(rd, cpu_tmp0);
3494 gen_set_label(l1);
3495 tcg_temp_free(r_cond);
3496 break;
3498 case 0x2d: /* V9 sdivx */
3499 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3500 gen_movl_TN_reg(rd, cpu_dst);
3501 break;
3502 case 0x2e: /* V9 popc */
3504 cpu_src2 = get_src2(insn, cpu_src2);
3505 gen_helper_popc(cpu_dst, cpu_src2);
3506 gen_movl_TN_reg(rd, cpu_dst);
3508 case 0x2f: /* V9 movr */
3510 int cond = GET_FIELD_SP(insn, 10, 12);
3511 int l1;
3513 cpu_src1 = get_src1(insn, cpu_src1);
3515 l1 = gen_new_label();
3517 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3518 cpu_src1, 0, l1);
3519 if (IS_IMM) { /* immediate */
3520 TCGv r_const;
3522 simm = GET_FIELD_SPs(insn, 0, 9);
3523 r_const = tcg_const_tl(simm);
3524 gen_movl_TN_reg(rd, r_const);
3525 tcg_temp_free(r_const);
3526 } else {
3527 rs2 = GET_FIELD_SP(insn, 0, 4);
3528 gen_movl_reg_TN(rs2, cpu_tmp0);
3529 gen_movl_TN_reg(rd, cpu_tmp0);
3531 gen_set_label(l1);
3532 break;
3534 #endif
3535 default:
3536 goto illegal_insn;
3539 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3540 #ifdef TARGET_SPARC64
3541 int opf = GET_FIELD_SP(insn, 5, 13);
3542 rs1 = GET_FIELD(insn, 13, 17);
3543 rs2 = GET_FIELD(insn, 27, 31);
3544 if (gen_trap_ifnofpu(dc, cpu_cond))
3545 goto jmp_insn;
3547 switch (opf) {
3548 case 0x000: /* VIS I edge8cc */
3549 case 0x001: /* VIS II edge8n */
3550 case 0x002: /* VIS I edge8lcc */
3551 case 0x003: /* VIS II edge8ln */
3552 case 0x004: /* VIS I edge16cc */
3553 case 0x005: /* VIS II edge16n */
3554 case 0x006: /* VIS I edge16lcc */
3555 case 0x007: /* VIS II edge16ln */
3556 case 0x008: /* VIS I edge32cc */
3557 case 0x009: /* VIS II edge32n */
3558 case 0x00a: /* VIS I edge32lcc */
3559 case 0x00b: /* VIS II edge32ln */
3560 // XXX
3561 goto illegal_insn;
3562 case 0x010: /* VIS I array8 */
3563 CHECK_FPU_FEATURE(dc, VIS1);
3564 cpu_src1 = get_src1(insn, cpu_src1);
3565 gen_movl_reg_TN(rs2, cpu_src2);
3566 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3567 gen_movl_TN_reg(rd, cpu_dst);
3568 break;
3569 case 0x012: /* VIS I array16 */
3570 CHECK_FPU_FEATURE(dc, VIS1);
3571 cpu_src1 = get_src1(insn, cpu_src1);
3572 gen_movl_reg_TN(rs2, cpu_src2);
3573 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3574 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3575 gen_movl_TN_reg(rd, cpu_dst);
3576 break;
3577 case 0x014: /* VIS I array32 */
3578 CHECK_FPU_FEATURE(dc, VIS1);
3579 cpu_src1 = get_src1(insn, cpu_src1);
3580 gen_movl_reg_TN(rs2, cpu_src2);
3581 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3582 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3583 gen_movl_TN_reg(rd, cpu_dst);
3584 break;
3585 case 0x018: /* VIS I alignaddr */
3586 CHECK_FPU_FEATURE(dc, VIS1);
3587 cpu_src1 = get_src1(insn, cpu_src1);
3588 gen_movl_reg_TN(rs2, cpu_src2);
3589 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3590 gen_movl_TN_reg(rd, cpu_dst);
3591 break;
3592 case 0x019: /* VIS II bmask */
3593 case 0x01a: /* VIS I alignaddrl */
3594 // XXX
3595 goto illegal_insn;
3596 case 0x020: /* VIS I fcmple16 */
3597 CHECK_FPU_FEATURE(dc, VIS1);
3598 gen_op_load_fpr_DT0(DFPREG(rs1));
3599 gen_op_load_fpr_DT1(DFPREG(rs2));
3600 gen_helper_fcmple16();
3601 gen_op_store_DT0_fpr(DFPREG(rd));
3602 break;
3603 case 0x022: /* VIS I fcmpne16 */
3604 CHECK_FPU_FEATURE(dc, VIS1);
3605 gen_op_load_fpr_DT0(DFPREG(rs1));
3606 gen_op_load_fpr_DT1(DFPREG(rs2));
3607 gen_helper_fcmpne16();
3608 gen_op_store_DT0_fpr(DFPREG(rd));
3609 break;
3610 case 0x024: /* VIS I fcmple32 */
3611 CHECK_FPU_FEATURE(dc, VIS1);
3612 gen_op_load_fpr_DT0(DFPREG(rs1));
3613 gen_op_load_fpr_DT1(DFPREG(rs2));
3614 gen_helper_fcmple32();
3615 gen_op_store_DT0_fpr(DFPREG(rd));
3616 break;
3617 case 0x026: /* VIS I fcmpne32 */
3618 CHECK_FPU_FEATURE(dc, VIS1);
3619 gen_op_load_fpr_DT0(DFPREG(rs1));
3620 gen_op_load_fpr_DT1(DFPREG(rs2));
3621 gen_helper_fcmpne32();
3622 gen_op_store_DT0_fpr(DFPREG(rd));
3623 break;
3624 case 0x028: /* VIS I fcmpgt16 */
3625 CHECK_FPU_FEATURE(dc, VIS1);
3626 gen_op_load_fpr_DT0(DFPREG(rs1));
3627 gen_op_load_fpr_DT1(DFPREG(rs2));
3628 gen_helper_fcmpgt16();
3629 gen_op_store_DT0_fpr(DFPREG(rd));
3630 break;
3631 case 0x02a: /* VIS I fcmpeq16 */
3632 CHECK_FPU_FEATURE(dc, VIS1);
3633 gen_op_load_fpr_DT0(DFPREG(rs1));
3634 gen_op_load_fpr_DT1(DFPREG(rs2));
3635 gen_helper_fcmpeq16();
3636 gen_op_store_DT0_fpr(DFPREG(rd));
3637 break;
3638 case 0x02c: /* VIS I fcmpgt32 */
3639 CHECK_FPU_FEATURE(dc, VIS1);
3640 gen_op_load_fpr_DT0(DFPREG(rs1));
3641 gen_op_load_fpr_DT1(DFPREG(rs2));
3642 gen_helper_fcmpgt32();
3643 gen_op_store_DT0_fpr(DFPREG(rd));
3644 break;
3645 case 0x02e: /* VIS I fcmpeq32 */
3646 CHECK_FPU_FEATURE(dc, VIS1);
3647 gen_op_load_fpr_DT0(DFPREG(rs1));
3648 gen_op_load_fpr_DT1(DFPREG(rs2));
3649 gen_helper_fcmpeq32();
3650 gen_op_store_DT0_fpr(DFPREG(rd));
3651 break;
3652 case 0x031: /* VIS I fmul8x16 */
3653 CHECK_FPU_FEATURE(dc, VIS1);
3654 gen_op_load_fpr_DT0(DFPREG(rs1));
3655 gen_op_load_fpr_DT1(DFPREG(rs2));
3656 gen_helper_fmul8x16();
3657 gen_op_store_DT0_fpr(DFPREG(rd));
3658 break;
3659 case 0x033: /* VIS I fmul8x16au */
3660 CHECK_FPU_FEATURE(dc, VIS1);
3661 gen_op_load_fpr_DT0(DFPREG(rs1));
3662 gen_op_load_fpr_DT1(DFPREG(rs2));
3663 gen_helper_fmul8x16au();
3664 gen_op_store_DT0_fpr(DFPREG(rd));
3665 break;
3666 case 0x035: /* VIS I fmul8x16al */
3667 CHECK_FPU_FEATURE(dc, VIS1);
3668 gen_op_load_fpr_DT0(DFPREG(rs1));
3669 gen_op_load_fpr_DT1(DFPREG(rs2));
3670 gen_helper_fmul8x16al();
3671 gen_op_store_DT0_fpr(DFPREG(rd));
3672 break;
3673 case 0x036: /* VIS I fmul8sux16 */
3674 CHECK_FPU_FEATURE(dc, VIS1);
3675 gen_op_load_fpr_DT0(DFPREG(rs1));
3676 gen_op_load_fpr_DT1(DFPREG(rs2));
3677 gen_helper_fmul8sux16();
3678 gen_op_store_DT0_fpr(DFPREG(rd));
3679 break;
3680 case 0x037: /* VIS I fmul8ulx16 */
3681 CHECK_FPU_FEATURE(dc, VIS1);
3682 gen_op_load_fpr_DT0(DFPREG(rs1));
3683 gen_op_load_fpr_DT1(DFPREG(rs2));
3684 gen_helper_fmul8ulx16();
3685 gen_op_store_DT0_fpr(DFPREG(rd));
3686 break;
3687 case 0x038: /* VIS I fmuld8sux16 */
3688 CHECK_FPU_FEATURE(dc, VIS1);
3689 gen_op_load_fpr_DT0(DFPREG(rs1));
3690 gen_op_load_fpr_DT1(DFPREG(rs2));
3691 gen_helper_fmuld8sux16();
3692 gen_op_store_DT0_fpr(DFPREG(rd));
3693 break;
3694 case 0x039: /* VIS I fmuld8ulx16 */
3695 CHECK_FPU_FEATURE(dc, VIS1);
3696 gen_op_load_fpr_DT0(DFPREG(rs1));
3697 gen_op_load_fpr_DT1(DFPREG(rs2));
3698 gen_helper_fmuld8ulx16();
3699 gen_op_store_DT0_fpr(DFPREG(rd));
3700 break;
3701 case 0x03a: /* VIS I fpack32 */
3702 case 0x03b: /* VIS I fpack16 */
3703 case 0x03d: /* VIS I fpackfix */
3704 case 0x03e: /* VIS I pdist */
3705 // XXX
3706 goto illegal_insn;
3707 case 0x048: /* VIS I faligndata */
3708 CHECK_FPU_FEATURE(dc, VIS1);
3709 gen_op_load_fpr_DT0(DFPREG(rs1));
3710 gen_op_load_fpr_DT1(DFPREG(rs2));
3711 gen_helper_faligndata();
3712 gen_op_store_DT0_fpr(DFPREG(rd));
3713 break;
3714 case 0x04b: /* VIS I fpmerge */
3715 CHECK_FPU_FEATURE(dc, VIS1);
3716 gen_op_load_fpr_DT0(DFPREG(rs1));
3717 gen_op_load_fpr_DT1(DFPREG(rs2));
3718 gen_helper_fpmerge();
3719 gen_op_store_DT0_fpr(DFPREG(rd));
3720 break;
3721 case 0x04c: /* VIS II bshuffle */
3722 // XXX
3723 goto illegal_insn;
3724 case 0x04d: /* VIS I fexpand */
3725 CHECK_FPU_FEATURE(dc, VIS1);
3726 gen_op_load_fpr_DT0(DFPREG(rs1));
3727 gen_op_load_fpr_DT1(DFPREG(rs2));
3728 gen_helper_fexpand();
3729 gen_op_store_DT0_fpr(DFPREG(rd));
3730 break;
3731 case 0x050: /* VIS I fpadd16 */
3732 CHECK_FPU_FEATURE(dc, VIS1);
3733 gen_op_load_fpr_DT0(DFPREG(rs1));
3734 gen_op_load_fpr_DT1(DFPREG(rs2));
3735 gen_helper_fpadd16();
3736 gen_op_store_DT0_fpr(DFPREG(rd));
3737 break;
3738 case 0x051: /* VIS I fpadd16s */
3739 CHECK_FPU_FEATURE(dc, VIS1);
3740 gen_helper_fpadd16s(cpu_fpr[rd],
3741 cpu_fpr[rs1], cpu_fpr[rs2]);
3742 break;
3743 case 0x052: /* VIS I fpadd32 */
3744 CHECK_FPU_FEATURE(dc, VIS1);
3745 gen_op_load_fpr_DT0(DFPREG(rs1));
3746 gen_op_load_fpr_DT1(DFPREG(rs2));
3747 gen_helper_fpadd32();
3748 gen_op_store_DT0_fpr(DFPREG(rd));
3749 break;
3750 case 0x053: /* VIS I fpadd32s */
3751 CHECK_FPU_FEATURE(dc, VIS1);
3752 gen_helper_fpadd32s(cpu_fpr[rd],
3753 cpu_fpr[rs1], cpu_fpr[rs2]);
3754 break;
3755 case 0x054: /* VIS I fpsub16 */
3756 CHECK_FPU_FEATURE(dc, VIS1);
3757 gen_op_load_fpr_DT0(DFPREG(rs1));
3758 gen_op_load_fpr_DT1(DFPREG(rs2));
3759 gen_helper_fpsub16();
3760 gen_op_store_DT0_fpr(DFPREG(rd));
3761 break;
3762 case 0x055: /* VIS I fpsub16s */
3763 CHECK_FPU_FEATURE(dc, VIS1);
3764 gen_helper_fpsub16s(cpu_fpr[rd],
3765 cpu_fpr[rs1], cpu_fpr[rs2]);
3766 break;
3767 case 0x056: /* VIS I fpsub32 */
3768 CHECK_FPU_FEATURE(dc, VIS1);
3769 gen_op_load_fpr_DT0(DFPREG(rs1));
3770 gen_op_load_fpr_DT1(DFPREG(rs2));
3771 gen_helper_fpsub32();
3772 gen_op_store_DT0_fpr(DFPREG(rd));
3773 break;
3774 case 0x057: /* VIS I fpsub32s */
3775 CHECK_FPU_FEATURE(dc, VIS1);
3776 gen_helper_fpsub32s(cpu_fpr[rd],
3777 cpu_fpr[rs1], cpu_fpr[rs2]);
3778 break;
3779 case 0x060: /* VIS I fzero */
3780 CHECK_FPU_FEATURE(dc, VIS1);
3781 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3782 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3783 break;
3784 case 0x061: /* VIS I fzeros */
3785 CHECK_FPU_FEATURE(dc, VIS1);
3786 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3787 break;
3788 case 0x062: /* VIS I fnor */
3789 CHECK_FPU_FEATURE(dc, VIS1);
3790 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3791 cpu_fpr[DFPREG(rs2)]);
3792 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3793 cpu_fpr[DFPREG(rs2) + 1]);
3794 break;
3795 case 0x063: /* VIS I fnors */
3796 CHECK_FPU_FEATURE(dc, VIS1);
3797 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3798 break;
3799 case 0x064: /* VIS I fandnot2 */
3800 CHECK_FPU_FEATURE(dc, VIS1);
3801 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3802 cpu_fpr[DFPREG(rs2)]);
3803 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3804 cpu_fpr[DFPREG(rs1) + 1],
3805 cpu_fpr[DFPREG(rs2) + 1]);
3806 break;
3807 case 0x065: /* VIS I fandnot2s */
3808 CHECK_FPU_FEATURE(dc, VIS1);
3809 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3810 break;
3811 case 0x066: /* VIS I fnot2 */
3812 CHECK_FPU_FEATURE(dc, VIS1);
3813 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3814 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3815 cpu_fpr[DFPREG(rs2) + 1]);
3816 break;
3817 case 0x067: /* VIS I fnot2s */
3818 CHECK_FPU_FEATURE(dc, VIS1);
3819 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3820 break;
3821 case 0x068: /* VIS I fandnot1 */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3824 cpu_fpr[DFPREG(rs1)]);
3825 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3826 cpu_fpr[DFPREG(rs2) + 1],
3827 cpu_fpr[DFPREG(rs1) + 1]);
3828 break;
3829 case 0x069: /* VIS I fandnot1s */
3830 CHECK_FPU_FEATURE(dc, VIS1);
3831 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3832 break;
3833 case 0x06a: /* VIS I fnot1 */
3834 CHECK_FPU_FEATURE(dc, VIS1);
3835 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3836 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3837 cpu_fpr[DFPREG(rs1) + 1]);
3838 break;
3839 case 0x06b: /* VIS I fnot1s */
3840 CHECK_FPU_FEATURE(dc, VIS1);
3841 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3842 break;
3843 case 0x06c: /* VIS I fxor */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3846 cpu_fpr[DFPREG(rs2)]);
3847 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3848 cpu_fpr[DFPREG(rs1) + 1],
3849 cpu_fpr[DFPREG(rs2) + 1]);
3850 break;
3851 case 0x06d: /* VIS I fxors */
3852 CHECK_FPU_FEATURE(dc, VIS1);
3853 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3854 break;
3855 case 0x06e: /* VIS I fnand */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3858 cpu_fpr[DFPREG(rs2)]);
3859 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3860 cpu_fpr[DFPREG(rs2) + 1]);
3861 break;
3862 case 0x06f: /* VIS I fnands */
3863 CHECK_FPU_FEATURE(dc, VIS1);
3864 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3865 break;
3866 case 0x070: /* VIS I fand */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3869 cpu_fpr[DFPREG(rs2)]);
3870 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3871 cpu_fpr[DFPREG(rs1) + 1],
3872 cpu_fpr[DFPREG(rs2) + 1]);
3873 break;
3874 case 0x071: /* VIS I fands */
3875 CHECK_FPU_FEATURE(dc, VIS1);
3876 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3877 break;
3878 case 0x072: /* VIS I fxnor */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3881 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3882 cpu_fpr[DFPREG(rs1)]);
3883 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3884 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3885 cpu_fpr[DFPREG(rs1) + 1]);
3886 break;
3887 case 0x073: /* VIS I fxnors */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3890 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3891 break;
3892 case 0x074: /* VIS I fsrc1 */
3893 CHECK_FPU_FEATURE(dc, VIS1);
3894 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3895 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3896 cpu_fpr[DFPREG(rs1) + 1]);
3897 break;
3898 case 0x075: /* VIS I fsrc1s */
3899 CHECK_FPU_FEATURE(dc, VIS1);
3900 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3901 break;
3902 case 0x076: /* VIS I fornot2 */
3903 CHECK_FPU_FEATURE(dc, VIS1);
3904 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3905 cpu_fpr[DFPREG(rs2)]);
3906 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3907 cpu_fpr[DFPREG(rs1) + 1],
3908 cpu_fpr[DFPREG(rs2) + 1]);
3909 break;
3910 case 0x077: /* VIS I fornot2s */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3913 break;
3914 case 0x078: /* VIS I fsrc2 */
3915 CHECK_FPU_FEATURE(dc, VIS1);
3916 gen_op_load_fpr_DT0(DFPREG(rs2));
3917 gen_op_store_DT0_fpr(DFPREG(rd));
3918 break;
3919 case 0x079: /* VIS I fsrc2s */
3920 CHECK_FPU_FEATURE(dc, VIS1);
3921 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3922 break;
3923 case 0x07a: /* VIS I fornot1 */
3924 CHECK_FPU_FEATURE(dc, VIS1);
3925 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3926 cpu_fpr[DFPREG(rs1)]);
3927 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3928 cpu_fpr[DFPREG(rs2) + 1],
3929 cpu_fpr[DFPREG(rs1) + 1]);
3930 break;
3931 case 0x07b: /* VIS I fornot1s */
3932 CHECK_FPU_FEATURE(dc, VIS1);
3933 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3934 break;
3935 case 0x07c: /* VIS I for */
3936 CHECK_FPU_FEATURE(dc, VIS1);
3937 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3938 cpu_fpr[DFPREG(rs2)]);
3939 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3940 cpu_fpr[DFPREG(rs1) + 1],
3941 cpu_fpr[DFPREG(rs2) + 1]);
3942 break;
3943 case 0x07d: /* VIS I fors */
3944 CHECK_FPU_FEATURE(dc, VIS1);
3945 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3946 break;
3947 case 0x07e: /* VIS I fone */
3948 CHECK_FPU_FEATURE(dc, VIS1);
3949 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3950 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3951 break;
3952 case 0x07f: /* VIS I fones */
3953 CHECK_FPU_FEATURE(dc, VIS1);
3954 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3955 break;
3956 case 0x080: /* VIS I shutdown */
3957 case 0x081: /* VIS II siam */
3958 // XXX
3959 goto illegal_insn;
3960 default:
3961 goto illegal_insn;
3963 #else
3964 goto ncp_insn;
3965 #endif
3966 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3967 #ifdef TARGET_SPARC64
3968 goto illegal_insn;
3969 #else
3970 goto ncp_insn;
3971 #endif
3972 #ifdef TARGET_SPARC64
3973 } else if (xop == 0x39) { /* V9 return */
3974 TCGv_i32 r_const;
3976 save_state(dc, cpu_cond);
3977 cpu_src1 = get_src1(insn, cpu_src1);
3978 if (IS_IMM) { /* immediate */
3979 simm = GET_FIELDs(insn, 19, 31);
3980 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3981 } else { /* register */
3982 rs2 = GET_FIELD(insn, 27, 31);
3983 if (rs2) {
3984 gen_movl_reg_TN(rs2, cpu_src2);
3985 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3986 } else
3987 tcg_gen_mov_tl(cpu_dst, cpu_src1);
3989 gen_helper_restore();
3990 gen_mov_pc_npc(dc, cpu_cond);
3991 r_const = tcg_const_i32(3);
3992 gen_helper_check_align(cpu_dst, r_const);
3993 tcg_temp_free_i32(r_const);
3994 tcg_gen_mov_tl(cpu_npc, cpu_dst);
3995 dc->npc = DYNAMIC_PC;
3996 goto jmp_insn;
3997 #endif
3998 } else {
3999 cpu_src1 = get_src1(insn, cpu_src1);
4000 if (IS_IMM) { /* immediate */
4001 simm = GET_FIELDs(insn, 19, 31);
4002 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4003 } else { /* register */
4004 rs2 = GET_FIELD(insn, 27, 31);
4005 if (rs2) {
4006 gen_movl_reg_TN(rs2, cpu_src2);
4007 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4008 } else
4009 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4011 switch (xop) {
4012 case 0x38: /* jmpl */
4014 TCGv r_pc;
4015 TCGv_i32 r_const;
4017 r_pc = tcg_const_tl(dc->pc);
4018 gen_movl_TN_reg(rd, r_pc);
4019 tcg_temp_free(r_pc);
4020 gen_mov_pc_npc(dc, cpu_cond);
4021 r_const = tcg_const_i32(3);
4022 gen_helper_check_align(cpu_dst, r_const);
4023 tcg_temp_free_i32(r_const);
4024 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4025 dc->npc = DYNAMIC_PC;
4027 goto jmp_insn;
4028 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4029 case 0x39: /* rett, V9 return */
4031 TCGv_i32 r_const;
4033 if (!supervisor(dc))
4034 goto priv_insn;
4035 gen_mov_pc_npc(dc, cpu_cond);
4036 r_const = tcg_const_i32(3);
4037 gen_helper_check_align(cpu_dst, r_const);
4038 tcg_temp_free_i32(r_const);
4039 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4040 dc->npc = DYNAMIC_PC;
4041 gen_helper_rett();
4043 goto jmp_insn;
4044 #endif
4045 case 0x3b: /* flush */
4046 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4047 goto unimp_flush;
4048 gen_helper_flush(cpu_dst);
4049 break;
4050 case 0x3c: /* save */
4051 save_state(dc, cpu_cond);
4052 gen_helper_save();
4053 gen_movl_TN_reg(rd, cpu_dst);
4054 break;
4055 case 0x3d: /* restore */
4056 save_state(dc, cpu_cond);
4057 gen_helper_restore();
4058 gen_movl_TN_reg(rd, cpu_dst);
4059 break;
4060 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4061 case 0x3e: /* V9 done/retry */
4063 switch (rd) {
4064 case 0:
4065 if (!supervisor(dc))
4066 goto priv_insn;
4067 dc->npc = DYNAMIC_PC;
4068 dc->pc = DYNAMIC_PC;
4069 gen_helper_done();
4070 goto jmp_insn;
4071 case 1:
4072 if (!supervisor(dc))
4073 goto priv_insn;
4074 dc->npc = DYNAMIC_PC;
4075 dc->pc = DYNAMIC_PC;
4076 gen_helper_retry();
4077 goto jmp_insn;
4078 default:
4079 goto illegal_insn;
4082 break;
4083 #endif
4084 default:
4085 goto illegal_insn;
4088 break;
4090 break;
4091 case 3: /* load/store instructions */
4093 unsigned int xop = GET_FIELD(insn, 7, 12);
4095 cpu_src1 = get_src1(insn, cpu_src1);
4096 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4097 rs2 = GET_FIELD(insn, 27, 31);
4098 gen_movl_reg_TN(rs2, cpu_src2);
4099 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4100 } else if (IS_IMM) { /* immediate */
4101 simm = GET_FIELDs(insn, 19, 31);
4102 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4103 } else { /* register */
4104 rs2 = GET_FIELD(insn, 27, 31);
4105 if (rs2 != 0) {
4106 gen_movl_reg_TN(rs2, cpu_src2);
4107 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4108 } else
4109 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4111 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4112 (xop > 0x17 && xop <= 0x1d ) ||
4113 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4114 switch (xop) {
4115 case 0x0: /* ld, V9 lduw, load unsigned word */
4116 gen_address_mask(dc, cpu_addr);
4117 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4118 break;
4119 case 0x1: /* ldub, load unsigned byte */
4120 gen_address_mask(dc, cpu_addr);
4121 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4122 break;
4123 case 0x2: /* lduh, load unsigned halfword */
4124 gen_address_mask(dc, cpu_addr);
4125 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4126 break;
4127 case 0x3: /* ldd, load double word */
4128 if (rd & 1)
4129 goto illegal_insn;
4130 else {
4131 TCGv_i32 r_const;
4133 save_state(dc, cpu_cond);
4134 r_const = tcg_const_i32(7);
4135 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4136 tcg_temp_free_i32(r_const);
4137 gen_address_mask(dc, cpu_addr);
4138 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4139 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4140 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4141 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4142 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4143 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4144 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4146 break;
4147 case 0x9: /* ldsb, load signed byte */
4148 gen_address_mask(dc, cpu_addr);
4149 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4150 break;
4151 case 0xa: /* ldsh, load signed halfword */
4152 gen_address_mask(dc, cpu_addr);
4153 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4154 break;
4155 case 0xd: /* ldstub -- XXX: should be atomically */
4157 TCGv r_const;
4159 gen_address_mask(dc, cpu_addr);
4160 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4161 r_const = tcg_const_tl(0xff);
4162 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4163 tcg_temp_free(r_const);
4165 break;
4166 case 0x0f: /* swap, swap register with memory. Also
4167 atomically */
4168 CHECK_IU_FEATURE(dc, SWAP);
4169 gen_movl_reg_TN(rd, cpu_val);
4170 gen_address_mask(dc, cpu_addr);
4171 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4172 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4173 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4174 break;
4175 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4176 case 0x10: /* lda, V9 lduwa, load word alternate */
4177 #ifndef TARGET_SPARC64
4178 if (IS_IMM)
4179 goto illegal_insn;
4180 if (!supervisor(dc))
4181 goto priv_insn;
4182 #endif
4183 save_state(dc, cpu_cond);
4184 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4185 break;
4186 case 0x11: /* lduba, load unsigned byte alternate */
4187 #ifndef TARGET_SPARC64
4188 if (IS_IMM)
4189 goto illegal_insn;
4190 if (!supervisor(dc))
4191 goto priv_insn;
4192 #endif
4193 save_state(dc, cpu_cond);
4194 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4195 break;
4196 case 0x12: /* lduha, load unsigned halfword alternate */
4197 #ifndef TARGET_SPARC64
4198 if (IS_IMM)
4199 goto illegal_insn;
4200 if (!supervisor(dc))
4201 goto priv_insn;
4202 #endif
4203 save_state(dc, cpu_cond);
4204 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4205 break;
4206 case 0x13: /* ldda, load double word alternate */
4207 #ifndef TARGET_SPARC64
4208 if (IS_IMM)
4209 goto illegal_insn;
4210 if (!supervisor(dc))
4211 goto priv_insn;
4212 #endif
4213 if (rd & 1)
4214 goto illegal_insn;
4215 save_state(dc, cpu_cond);
4216 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4217 goto skip_move;
4218 case 0x19: /* ldsba, load signed byte alternate */
4219 #ifndef TARGET_SPARC64
4220 if (IS_IMM)
4221 goto illegal_insn;
4222 if (!supervisor(dc))
4223 goto priv_insn;
4224 #endif
4225 save_state(dc, cpu_cond);
4226 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4227 break;
4228 case 0x1a: /* ldsha, load signed halfword alternate */
4229 #ifndef TARGET_SPARC64
4230 if (IS_IMM)
4231 goto illegal_insn;
4232 if (!supervisor(dc))
4233 goto priv_insn;
4234 #endif
4235 save_state(dc, cpu_cond);
4236 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4237 break;
4238 case 0x1d: /* ldstuba -- XXX: should be atomically */
4239 #ifndef TARGET_SPARC64
4240 if (IS_IMM)
4241 goto illegal_insn;
4242 if (!supervisor(dc))
4243 goto priv_insn;
4244 #endif
4245 save_state(dc, cpu_cond);
4246 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4247 break;
4248 case 0x1f: /* swapa, swap reg with alt. memory. Also
4249 atomically */
4250 CHECK_IU_FEATURE(dc, SWAP);
4251 #ifndef TARGET_SPARC64
4252 if (IS_IMM)
4253 goto illegal_insn;
4254 if (!supervisor(dc))
4255 goto priv_insn;
4256 #endif
4257 save_state(dc, cpu_cond);
4258 gen_movl_reg_TN(rd, cpu_val);
4259 gen_swap_asi(cpu_val, cpu_addr, insn);
4260 break;
4262 #ifndef TARGET_SPARC64
4263 case 0x30: /* ldc */
4264 case 0x31: /* ldcsr */
4265 case 0x33: /* lddc */
4266 goto ncp_insn;
4267 #endif
4268 #endif
4269 #ifdef TARGET_SPARC64
4270 case 0x08: /* V9 ldsw */
4271 gen_address_mask(dc, cpu_addr);
4272 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4273 break;
4274 case 0x0b: /* V9 ldx */
4275 gen_address_mask(dc, cpu_addr);
4276 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4277 break;
4278 case 0x18: /* V9 ldswa */
4279 save_state(dc, cpu_cond);
4280 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4281 break;
4282 case 0x1b: /* V9 ldxa */
4283 save_state(dc, cpu_cond);
4284 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4285 break;
4286 case 0x2d: /* V9 prefetch, no effect */
4287 goto skip_move;
4288 case 0x30: /* V9 ldfa */
4289 save_state(dc, cpu_cond);
4290 gen_ldf_asi(cpu_addr, insn, 4, rd);
4291 goto skip_move;
4292 case 0x33: /* V9 lddfa */
4293 save_state(dc, cpu_cond);
4294 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4295 goto skip_move;
4296 case 0x3d: /* V9 prefetcha, no effect */
4297 goto skip_move;
4298 case 0x32: /* V9 ldqfa */
4299 CHECK_FPU_FEATURE(dc, FLOAT128);
4300 save_state(dc, cpu_cond);
4301 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4302 goto skip_move;
4303 #endif
4304 default:
4305 goto illegal_insn;
4307 gen_movl_TN_reg(rd, cpu_val);
4308 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4309 skip_move: ;
4310 #endif
4311 } else if (xop >= 0x20 && xop < 0x24) {
4312 if (gen_trap_ifnofpu(dc, cpu_cond))
4313 goto jmp_insn;
4314 save_state(dc, cpu_cond);
4315 switch (xop) {
4316 case 0x20: /* ldf, load fpreg */
4317 gen_address_mask(dc, cpu_addr);
4318 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4319 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4320 break;
4321 case 0x21: /* ldfsr, V9 ldxfsr */
4322 #ifdef TARGET_SPARC64
4323 gen_address_mask(dc, cpu_addr);
4324 if (rd == 1) {
4325 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4326 gen_helper_ldxfsr(cpu_tmp64);
4327 } else
4328 #else
4330 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4331 gen_helper_ldfsr(cpu_tmp32);
4333 #endif
4334 break;
4335 case 0x22: /* ldqf, load quad fpreg */
4337 TCGv_i32 r_const;
4339 CHECK_FPU_FEATURE(dc, FLOAT128);
4340 r_const = tcg_const_i32(dc->mem_idx);
4341 gen_helper_ldqf(cpu_addr, r_const);
4342 tcg_temp_free_i32(r_const);
4343 gen_op_store_QT0_fpr(QFPREG(rd));
4345 break;
4346 case 0x23: /* lddf, load double fpreg */
4348 TCGv_i32 r_const;
4350 r_const = tcg_const_i32(dc->mem_idx);
4351 gen_helper_lddf(cpu_addr, r_const);
4352 tcg_temp_free_i32(r_const);
4353 gen_op_store_DT0_fpr(DFPREG(rd));
4355 break;
4356 default:
4357 goto illegal_insn;
4359 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4360 xop == 0xe || xop == 0x1e) {
4361 gen_movl_reg_TN(rd, cpu_val);
4362 switch (xop) {
4363 case 0x4: /* st, store word */
4364 gen_address_mask(dc, cpu_addr);
4365 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4366 break;
4367 case 0x5: /* stb, store byte */
4368 gen_address_mask(dc, cpu_addr);
4369 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4370 break;
4371 case 0x6: /* sth, store halfword */
4372 gen_address_mask(dc, cpu_addr);
4373 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4374 break;
4375 case 0x7: /* std, store double word */
4376 if (rd & 1)
4377 goto illegal_insn;
4378 else {
4379 TCGv_i32 r_const;
4381 save_state(dc, cpu_cond);
4382 gen_address_mask(dc, cpu_addr);
4383 r_const = tcg_const_i32(7);
4384 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4385 tcg_temp_free_i32(r_const);
4386 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4387 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4388 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4390 break;
4391 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4392 case 0x14: /* sta, V9 stwa, store word alternate */
4393 #ifndef TARGET_SPARC64
4394 if (IS_IMM)
4395 goto illegal_insn;
4396 if (!supervisor(dc))
4397 goto priv_insn;
4398 #endif
4399 save_state(dc, cpu_cond);
4400 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4401 break;
4402 case 0x15: /* stba, store byte alternate */
4403 #ifndef TARGET_SPARC64
4404 if (IS_IMM)
4405 goto illegal_insn;
4406 if (!supervisor(dc))
4407 goto priv_insn;
4408 #endif
4409 save_state(dc, cpu_cond);
4410 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4411 break;
4412 case 0x16: /* stha, store halfword alternate */
4413 #ifndef TARGET_SPARC64
4414 if (IS_IMM)
4415 goto illegal_insn;
4416 if (!supervisor(dc))
4417 goto priv_insn;
4418 #endif
4419 save_state(dc, cpu_cond);
4420 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4421 break;
4422 case 0x17: /* stda, store double word alternate */
4423 #ifndef TARGET_SPARC64
4424 if (IS_IMM)
4425 goto illegal_insn;
4426 if (!supervisor(dc))
4427 goto priv_insn;
4428 #endif
4429 if (rd & 1)
4430 goto illegal_insn;
4431 else {
4432 save_state(dc, cpu_cond);
4433 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4435 break;
4436 #endif
4437 #ifdef TARGET_SPARC64
4438 case 0x0e: /* V9 stx */
4439 gen_address_mask(dc, cpu_addr);
4440 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4441 break;
4442 case 0x1e: /* V9 stxa */
4443 save_state(dc, cpu_cond);
4444 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4445 break;
4446 #endif
4447 default:
4448 goto illegal_insn;
4450 } else if (xop > 0x23 && xop < 0x28) {
4451 if (gen_trap_ifnofpu(dc, cpu_cond))
4452 goto jmp_insn;
4453 save_state(dc, cpu_cond);
4454 switch (xop) {
4455 case 0x24: /* stf, store fpreg */
4456 gen_address_mask(dc, cpu_addr);
4457 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4458 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4459 break;
4460 case 0x25: /* stfsr, V9 stxfsr */
4461 #ifdef TARGET_SPARC64
4462 gen_address_mask(dc, cpu_addr);
4463 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4464 if (rd == 1)
4465 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4466 else
4467 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4468 #else
4469 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4470 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4471 #endif
4472 break;
4473 case 0x26:
4474 #ifdef TARGET_SPARC64
4475 /* V9 stqf, store quad fpreg */
4477 TCGv_i32 r_const;
4479 CHECK_FPU_FEATURE(dc, FLOAT128);
4480 gen_op_load_fpr_QT0(QFPREG(rd));
4481 r_const = tcg_const_i32(dc->mem_idx);
4482 gen_helper_stqf(cpu_addr, r_const);
4483 tcg_temp_free_i32(r_const);
4485 break;
4486 #else /* !TARGET_SPARC64 */
4487 /* stdfq, store floating point queue */
4488 #if defined(CONFIG_USER_ONLY)
4489 goto illegal_insn;
4490 #else
4491 if (!supervisor(dc))
4492 goto priv_insn;
4493 if (gen_trap_ifnofpu(dc, cpu_cond))
4494 goto jmp_insn;
4495 goto nfq_insn;
4496 #endif
4497 #endif
4498 case 0x27: /* stdf, store double fpreg */
4500 TCGv_i32 r_const;
4502 gen_op_load_fpr_DT0(DFPREG(rd));
4503 r_const = tcg_const_i32(dc->mem_idx);
4504 gen_helper_stdf(cpu_addr, r_const);
4505 tcg_temp_free_i32(r_const);
4507 break;
4508 default:
4509 goto illegal_insn;
4511 } else if (xop > 0x33 && xop < 0x3f) {
4512 save_state(dc, cpu_cond);
4513 switch (xop) {
4514 #ifdef TARGET_SPARC64
4515 case 0x34: /* V9 stfa */
4516 gen_stf_asi(cpu_addr, insn, 4, rd);
4517 break;
4518 case 0x36: /* V9 stqfa */
4520 TCGv_i32 r_const;
4522 CHECK_FPU_FEATURE(dc, FLOAT128);
4523 r_const = tcg_const_i32(7);
4524 gen_helper_check_align(cpu_addr, r_const);
4525 tcg_temp_free_i32(r_const);
4526 gen_op_load_fpr_QT0(QFPREG(rd));
4527 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4529 break;
4530 case 0x37: /* V9 stdfa */
4531 gen_op_load_fpr_DT0(DFPREG(rd));
4532 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4533 break;
4534 case 0x3c: /* V9 casa */
4535 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4536 gen_movl_TN_reg(rd, cpu_val);
4537 break;
4538 case 0x3e: /* V9 casxa */
4539 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4540 gen_movl_TN_reg(rd, cpu_val);
4541 break;
4542 #else
4543 case 0x34: /* stc */
4544 case 0x35: /* stcsr */
4545 case 0x36: /* stdcq */
4546 case 0x37: /* stdc */
4547 goto ncp_insn;
4548 #endif
4549 default:
4550 goto illegal_insn;
4552 } else
4553 goto illegal_insn;
4555 break;
4557 /* default case for non jump instructions */
4558 if (dc->npc == DYNAMIC_PC) {
4559 dc->pc = DYNAMIC_PC;
4560 gen_op_next_insn();
4561 } else if (dc->npc == JUMP_PC) {
4562 /* we can do a static jump */
4563 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4564 dc->is_br = 1;
4565 } else {
4566 dc->pc = dc->npc;
4567 dc->npc = dc->npc + 4;
4569 jmp_insn:
4570 return;
4571 illegal_insn:
4573 TCGv_i32 r_const;
4575 save_state(dc, cpu_cond);
4576 r_const = tcg_const_i32(TT_ILL_INSN);
4577 gen_helper_raise_exception(r_const);
4578 tcg_temp_free_i32(r_const);
4579 dc->is_br = 1;
4581 return;
4582 unimp_flush:
4584 TCGv_i32 r_const;
4586 save_state(dc, cpu_cond);
4587 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4588 gen_helper_raise_exception(r_const);
4589 tcg_temp_free_i32(r_const);
4590 dc->is_br = 1;
4592 return;
4593 #if !defined(CONFIG_USER_ONLY)
4594 priv_insn:
4596 TCGv_i32 r_const;
4598 save_state(dc, cpu_cond);
4599 r_const = tcg_const_i32(TT_PRIV_INSN);
4600 gen_helper_raise_exception(r_const);
4601 tcg_temp_free_i32(r_const);
4602 dc->is_br = 1;
4604 return;
4605 #endif
4606 nfpu_insn:
4607 save_state(dc, cpu_cond);
4608 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4609 dc->is_br = 1;
4610 return;
4611 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4612 nfq_insn:
4613 save_state(dc, cpu_cond);
4614 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4615 dc->is_br = 1;
4616 return;
4617 #endif
4618 #ifndef TARGET_SPARC64
4619 ncp_insn:
4621 TCGv r_const;
4623 save_state(dc, cpu_cond);
4624 r_const = tcg_const_i32(TT_NCP_INSN);
4625 gen_helper_raise_exception(r_const);
4626 tcg_temp_free(r_const);
4627 dc->is_br = 1;
4629 return;
4630 #endif
4633 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4634 int spc, CPUSPARCState *env)
4636 target_ulong pc_start, last_pc;
4637 uint16_t *gen_opc_end;
4638 DisasContext dc1, *dc = &dc1;
4639 CPUBreakpoint *bp;
4640 int j, lj = -1;
4641 int num_insns;
4642 int max_insns;
4644 memset(dc, 0, sizeof(DisasContext));
4645 dc->tb = tb;
4646 pc_start = tb->pc;
4647 dc->pc = pc_start;
4648 last_pc = dc->pc;
4649 dc->npc = (target_ulong) tb->cs_base;
4650 dc->cc_op = CC_OP_DYNAMIC;
4651 dc->mem_idx = cpu_mmu_index(env);
4652 dc->def = env->def;
4653 if ((dc->def->features & CPU_FEATURE_FLOAT))
4654 dc->fpu_enabled = cpu_fpu_enabled(env);
4655 else
4656 dc->fpu_enabled = 0;
4657 #ifdef TARGET_SPARC64
4658 dc->address_mask_32bit = env->pstate & PS_AM;
4659 #endif
4660 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4662 cpu_tmp0 = tcg_temp_new();
4663 cpu_tmp32 = tcg_temp_new_i32();
4664 cpu_tmp64 = tcg_temp_new_i64();
4666 cpu_dst = tcg_temp_local_new();
4668 // loads and stores
4669 cpu_val = tcg_temp_local_new();
4670 cpu_addr = tcg_temp_local_new();
4672 num_insns = 0;
4673 max_insns = tb->cflags & CF_COUNT_MASK;
4674 if (max_insns == 0)
4675 max_insns = CF_COUNT_MASK;
4676 gen_icount_start();
4677 do {
4678 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4679 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4680 if (bp->pc == dc->pc) {
4681 if (dc->pc != pc_start)
4682 save_state(dc, cpu_cond);
4683 gen_helper_debug();
4684 tcg_gen_exit_tb(0);
4685 dc->is_br = 1;
4686 goto exit_gen_loop;
4690 if (spc) {
4691 qemu_log("Search PC...\n");
4692 j = gen_opc_ptr - gen_opc_buf;
4693 if (lj < j) {
4694 lj++;
4695 while (lj < j)
4696 gen_opc_instr_start[lj++] = 0;
4697 gen_opc_pc[lj] = dc->pc;
4698 gen_opc_npc[lj] = dc->npc;
4699 gen_opc_instr_start[lj] = 1;
4700 gen_opc_icount[lj] = num_insns;
4703 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4704 gen_io_start();
4705 last_pc = dc->pc;
4706 disas_sparc_insn(dc);
4707 num_insns++;
4709 if (dc->is_br)
4710 break;
4711 /* if the next PC is different, we abort now */
4712 if (dc->pc != (last_pc + 4))
4713 break;
4714 /* if we reach a page boundary, we stop generation so that the
4715 PC of a TT_TFAULT exception is always in the right page */
4716 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4717 break;
4718 /* if single step mode, we generate only one instruction and
4719 generate an exception */
4720 if (env->singlestep_enabled || singlestep) {
4721 tcg_gen_movi_tl(cpu_pc, dc->pc);
4722 tcg_gen_exit_tb(0);
4723 break;
4725 } while ((gen_opc_ptr < gen_opc_end) &&
4726 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4727 num_insns < max_insns);
4729 exit_gen_loop:
4730 tcg_temp_free(cpu_addr);
4731 tcg_temp_free(cpu_val);
4732 tcg_temp_free(cpu_dst);
4733 tcg_temp_free_i64(cpu_tmp64);
4734 tcg_temp_free_i32(cpu_tmp32);
4735 tcg_temp_free(cpu_tmp0);
4736 if (tb->cflags & CF_LAST_IO)
4737 gen_io_end();
4738 if (!dc->is_br) {
4739 if (dc->pc != DYNAMIC_PC &&
4740 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4741 /* static PC and NPC: we can use direct chaining */
4742 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4743 } else {
4744 if (dc->pc != DYNAMIC_PC)
4745 tcg_gen_movi_tl(cpu_pc, dc->pc);
4746 save_npc(dc, cpu_cond);
4747 tcg_gen_exit_tb(0);
4750 gen_icount_end(tb, num_insns);
4751 *gen_opc_ptr = INDEX_op_end;
4752 if (spc) {
4753 j = gen_opc_ptr - gen_opc_buf;
4754 lj++;
4755 while (lj <= j)
4756 gen_opc_instr_start[lj++] = 0;
4757 #if 0
4758 log_page_dump();
4759 #endif
4760 gen_opc_jump_pc[0] = dc->jump_pc[0];
4761 gen_opc_jump_pc[1] = dc->jump_pc[1];
4762 } else {
4763 tb->size = last_pc + 4 - pc_start;
4764 tb->icount = num_insns;
4766 #ifdef DEBUG_DISAS
4767 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4768 qemu_log("--------------\n");
4769 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4770 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4771 qemu_log("\n");
4773 #endif
4776 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4778 gen_intermediate_code_internal(tb, 0, env);
4781 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4783 gen_intermediate_code_internal(tb, 1, env);
4786 void gen_intermediate_code_init(CPUSPARCState *env)
4788 unsigned int i;
4789 static int inited;
4790 static const char * const gregnames[8] = {
4791 NULL, // g0 not used
4792 "g1",
4793 "g2",
4794 "g3",
4795 "g4",
4796 "g5",
4797 "g6",
4798 "g7",
4800 static const char * const fregnames[64] = {
4801 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4802 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4803 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4804 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4805 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4806 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4807 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4808 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4811 /* init various static tables */
4812 if (!inited) {
4813 inited = 1;
4815 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4816 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4817 offsetof(CPUState, regwptr),
4818 "regwptr");
4819 #ifdef TARGET_SPARC64
4820 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4821 "xcc");
4822 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4823 "asi");
4824 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4825 "fprs");
4826 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4827 "gsr");
4828 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4829 offsetof(CPUState, tick_cmpr),
4830 "tick_cmpr");
4831 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4832 offsetof(CPUState, stick_cmpr),
4833 "stick_cmpr");
4834 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4835 offsetof(CPUState, hstick_cmpr),
4836 "hstick_cmpr");
4837 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4838 "hintp");
4839 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4840 "htba");
4841 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4842 "hver");
4843 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4844 offsetof(CPUState, ssr), "ssr");
4845 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4846 offsetof(CPUState, version), "ver");
4847 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4848 offsetof(CPUState, softint),
4849 "softint");
4850 #else
4851 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4852 "wim");
4853 #endif
4854 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4855 "cond");
4856 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4857 "cc_src");
4858 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4859 offsetof(CPUState, cc_src2),
4860 "cc_src2");
4861 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4862 "cc_dst");
4863 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
4864 "cc_op");
4865 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4866 "psr");
4867 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4868 "fsr");
4869 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4870 "pc");
4871 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
4872 "npc");
4873 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
4874 #ifndef CONFIG_USER_ONLY
4875 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
4876 "tbr");
4877 #endif
4878 for (i = 1; i < 8; i++)
4879 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
4880 offsetof(CPUState, gregs[i]),
4881 gregnames[i]);
4882 for (i = 0; i < TARGET_FPREGS; i++)
4883 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
4884 offsetof(CPUState, fpr[i]),
4885 fregnames[i]);
4887 /* register helpers */
4889 #define GEN_HELPER 2
4890 #include "helper.h"
4894 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4895 unsigned long searched_pc, int pc_pos, void *puc)
4897 target_ulong npc;
4898 env->pc = gen_opc_pc[pc_pos];
4899 npc = gen_opc_npc[pc_pos];
4900 if (npc == 1) {
4901 /* dynamic NPC: already stored */
4902 } else if (npc == 2) {
4903 target_ulong t2 = (target_ulong)(unsigned long)puc;
4904 /* jump PC: use T2 and the jump targets of the translation */
4905 if (t2)
4906 env->npc = gen_opc_jump_pc[0];
4907 else
4908 env->npc = gen_opc_jump_pc[1];
4909 } else {
4910 env->npc = npc;