vga roms: move loading from pc.c to vga drivers.
[qemu/scottt.git] / target-sparc / translate.c
blobbf6df50c8d81426642be4fd3a1276a7c83ecca41
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "exec-all.h"
29 #include "disas.h"
30 #include "helper.h"
31 #include "tcg-op.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
36 #define DEBUG_DISAS
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env, cpu_regwptr;
44 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45 static TCGv_i32 cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
81 sparc_def_t *def;
82 } DisasContext;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of x to a full int (used by the
 * GET_FIELDs/GET_FIELD_SPs immediate extractors).
 *
 * Fix: the original computed (x << len) on a signed int, which is
 * undefined behavior in C whenever set bits are shifted into or through
 * the sign bit.  Do the left shift on an unsigned value and only the
 * arithmetic right shift on the signed type.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int32_t)((uint32_t)x << len)) >> len;
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
/* On sparc64, truncate 'addr' to 32 bits when address masking is in
   effect (AM_CHECK: dc->address_mask_32bit, or unconditionally under
   TARGET_ABI32).  No-op for 32-bit targets.  */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
207 if (reg == 0)
208 tcg_gen_movi_tl(tn, 0);
209 else if (reg < 8)
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211 else {
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
218 if (reg == 0)
219 return;
220 else if (reg < 8)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
222 else {
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Finish the TB with a (pc, npc) control transfer.  If both targets lie
   on the same guest page as this TB, chain directly to the successor
   (goto_tb + exit_tb carrying the TB pointer and slot index tb_num);
   otherwise emit a plain exit so the execution loop re-resolves the
   target.  NOTE(review): the (long)tb cast assumes long is
   pointer-sized, which does not hold on LLP64 hosts — confirm against
   the tcg_gen_exit_tb signature of this tree.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
/* Raise TT_TOVF if the signed 32-bit addition dst = src1 + src2
   overflowed: overflow iff both operands have the same sign and the
   result's sign differs, i.e. bit 31 of ~(src1 ^ src2) & (src1 ^ dst).
   Falls through past label l1 when no trap is taken.  Clobbers
   cpu_tmp0.  */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
299 static inline void gen_tag_tv(TCGv src1, TCGv src2)
301 int l1;
302 TCGv_i32 r_const;
304 l1 = gen_new_label();
305 tcg_gen_or_tl(cpu_tmp0, src1, src2);
306 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
307 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
308 r_const = tcg_const_i32(TT_TOVF);
309 gen_helper_raise_exception(r_const);
310 tcg_temp_free_i32(r_const);
311 gen_set_label(l1);
314 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
316 tcg_gen_mov_tl(cpu_cc_src, src1);
317 tcg_gen_movi_tl(cpu_cc_src2, src2);
318 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
319 tcg_gen_mov_tl(dst, cpu_cc_dst);
322 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
324 tcg_gen_mov_tl(cpu_cc_src, src1);
325 tcg_gen_mov_tl(cpu_cc_src2, src2);
326 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
327 tcg_gen_mov_tl(dst, cpu_cc_dst);
330 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
332 tcg_gen_mov_tl(cpu_cc_src, src1);
333 tcg_gen_movi_tl(cpu_cc_src2, src2);
334 gen_mov_reg_C(cpu_tmp0, cpu_psr);
335 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
336 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
337 tcg_gen_mov_tl(dst, cpu_cc_dst);
340 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
342 tcg_gen_mov_tl(cpu_cc_src, src1);
343 tcg_gen_mov_tl(cpu_cc_src2, src2);
344 gen_mov_reg_C(cpu_tmp0, cpu_psr);
345 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
346 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
347 tcg_gen_mov_tl(dst, cpu_cc_dst);
350 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
352 tcg_gen_mov_tl(cpu_cc_src, src1);
353 tcg_gen_mov_tl(cpu_cc_src2, src2);
354 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
355 tcg_gen_mov_tl(dst, cpu_cc_dst);
358 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
360 tcg_gen_mov_tl(cpu_cc_src, src1);
361 tcg_gen_mov_tl(cpu_cc_src2, src2);
362 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
363 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
364 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
365 tcg_gen_mov_tl(dst, cpu_cc_dst);
368 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
370 TCGv r_temp;
371 TCGv_i32 r_const;
372 int l1;
374 l1 = gen_new_label();
376 r_temp = tcg_temp_new();
377 tcg_gen_xor_tl(r_temp, src1, src2);
378 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
379 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
380 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
381 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
382 r_const = tcg_const_i32(TT_TOVF);
383 gen_helper_raise_exception(r_const);
384 tcg_temp_free_i32(r_const);
385 gen_set_label(l1);
386 tcg_temp_free(r_temp);
/* Subtract-immediate that also updates the lazy condition-code state.
   When src2 == 0 the operation degenerates to a move, so the flags can
   use the cheaper CC_OP_LOGIC convention; otherwise CC_OP_SUB is
   recorded.  Keeps the translator's dc->cc_op in sync with the
   cpu_cc_op global.  */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
405 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
407 tcg_gen_mov_tl(cpu_cc_src, src1);
408 tcg_gen_mov_tl(cpu_cc_src2, src2);
409 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
410 tcg_gen_mov_tl(dst, cpu_cc_dst);
413 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
415 tcg_gen_mov_tl(cpu_cc_src, src1);
416 tcg_gen_movi_tl(cpu_cc_src2, src2);
417 gen_mov_reg_C(cpu_tmp0, cpu_psr);
418 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
419 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
420 tcg_gen_mov_tl(dst, cpu_cc_dst);
423 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
425 tcg_gen_mov_tl(cpu_cc_src, src1);
426 tcg_gen_mov_tl(cpu_cc_src2, src2);
427 gen_mov_reg_C(cpu_tmp0, cpu_psr);
428 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
429 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
430 tcg_gen_mov_tl(dst, cpu_cc_dst);
433 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
435 tcg_gen_mov_tl(cpu_cc_src, src1);
436 tcg_gen_mov_tl(cpu_cc_src2, src2);
437 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
438 tcg_gen_mov_tl(dst, cpu_cc_dst);
441 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
443 tcg_gen_mov_tl(cpu_cc_src, src1);
444 tcg_gen_mov_tl(cpu_cc_src2, src2);
445 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
446 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
447 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* MULScc: one step of the SPARC iterative multiply.  Zeroes the second
   operand when Y's old bit 0 is clear, shifts Y right by one inserting
   bit 0 of the first operand at the top, forms (N ^ V):(src1 >> 1) and
   adds the (possibly zeroed) operand; condition codes come from that
   final add.  The temp usage is order-sensitive — do not reorder.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* UMUL: unsigned 32x32 -> 64 multiply.  The high 32 bits of the
   product are written to the Y register; dst receives the full 64-bit
   product on sparc64, or the truncated low 32 bits on 32-bit targets.  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high half of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
519 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
521 TCGv_i64 r_temp, r_temp2;
523 r_temp = tcg_temp_new_i64();
524 r_temp2 = tcg_temp_new_i64();
526 tcg_gen_ext_tl_i64(r_temp, src2);
527 tcg_gen_ext_tl_i64(r_temp2, src1);
528 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
530 tcg_gen_shri_i64(r_temp, r_temp2, 32);
531 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
532 tcg_temp_free_i64(r_temp);
533 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
534 #ifdef TARGET_SPARC64
535 tcg_gen_mov_i64(dst, r_temp2);
536 #else
537 tcg_gen_trunc_i64_tl(dst, r_temp2);
538 #endif
539 tcg_temp_free_i64(r_temp2);
542 #ifdef TARGET_SPARC64
543 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
545 TCGv_i32 r_const;
546 int l1;
548 l1 = gen_new_label();
549 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
550 r_const = tcg_const_i32(TT_DIV_ZERO);
551 gen_helper_raise_exception(r_const);
552 tcg_temp_free_i32(r_const);
553 gen_set_label(l1);
/* SDIVX: 64-bit signed division.  Traps on a zero divisor, and handles
   INT64_MIN / -1 explicitly (the one case where host integer division
   would overflow): the architectural result is INT64_MIN.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
573 #endif
575 // 1
576 static inline void gen_op_eval_ba(TCGv dst)
578 tcg_gen_movi_tl(dst, 1);
581 // Z
582 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
584 gen_mov_reg_Z(dst, src);
587 // Z | (N ^ V)
588 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
590 gen_mov_reg_N(cpu_tmp0, src);
591 gen_mov_reg_V(dst, src);
592 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
593 gen_mov_reg_Z(cpu_tmp0, src);
594 tcg_gen_or_tl(dst, dst, cpu_tmp0);
597 // N ^ V
598 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
600 gen_mov_reg_V(cpu_tmp0, src);
601 gen_mov_reg_N(dst, src);
602 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
605 // C | Z
606 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
608 gen_mov_reg_Z(cpu_tmp0, src);
609 gen_mov_reg_C(dst, src);
610 tcg_gen_or_tl(dst, dst, cpu_tmp0);
613 // C
614 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
616 gen_mov_reg_C(dst, src);
619 // V
620 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
622 gen_mov_reg_V(dst, src);
625 // 0
626 static inline void gen_op_eval_bn(TCGv dst)
628 tcg_gen_movi_tl(dst, 0);
631 // N
632 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
634 gen_mov_reg_N(dst, src);
637 // !Z
638 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
640 gen_mov_reg_Z(dst, src);
641 tcg_gen_xori_tl(dst, dst, 0x1);
644 // !(Z | (N ^ V))
645 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
647 gen_mov_reg_N(cpu_tmp0, src);
648 gen_mov_reg_V(dst, src);
649 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
650 gen_mov_reg_Z(cpu_tmp0, src);
651 tcg_gen_or_tl(dst, dst, cpu_tmp0);
652 tcg_gen_xori_tl(dst, dst, 0x1);
655 // !(N ^ V)
656 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
658 gen_mov_reg_V(cpu_tmp0, src);
659 gen_mov_reg_N(dst, src);
660 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
661 tcg_gen_xori_tl(dst, dst, 0x1);
664 // !(C | Z)
665 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
667 gen_mov_reg_Z(cpu_tmp0, src);
668 gen_mov_reg_C(dst, src);
669 tcg_gen_or_tl(dst, dst, cpu_tmp0);
670 tcg_gen_xori_tl(dst, dst, 0x1);
673 // !C
674 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
676 gen_mov_reg_C(dst, src);
677 tcg_gen_xori_tl(dst, dst, 0x1);
680 // !N
681 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
683 gen_mov_reg_N(dst, src);
684 tcg_gen_xori_tl(dst, dst, 0x1);
687 // !V
688 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
690 gen_mov_reg_V(dst, src);
691 tcg_gen_xori_tl(dst, dst, 0x1);
695 FPSR bit field FCC1 | FCC0:
699 3 unordered
701 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
702 unsigned int fcc_offset)
704 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
705 tcg_gen_andi_tl(reg, reg, 0x1);
708 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
709 unsigned int fcc_offset)
711 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
712 tcg_gen_andi_tl(reg, reg, 0x1);
715 // !0: FCC0 | FCC1
716 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
717 unsigned int fcc_offset)
719 gen_mov_reg_FCC0(dst, src, fcc_offset);
720 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
721 tcg_gen_or_tl(dst, dst, cpu_tmp0);
724 // 1 or 2: FCC0 ^ FCC1
725 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
726 unsigned int fcc_offset)
728 gen_mov_reg_FCC0(dst, src, fcc_offset);
729 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
730 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
733 // 1 or 3: FCC0
734 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
735 unsigned int fcc_offset)
737 gen_mov_reg_FCC0(dst, src, fcc_offset);
740 // 1: FCC0 & !FCC1
741 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
742 unsigned int fcc_offset)
744 gen_mov_reg_FCC0(dst, src, fcc_offset);
745 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
746 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
747 tcg_gen_and_tl(dst, dst, cpu_tmp0);
750 // 2 or 3: FCC1
751 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
752 unsigned int fcc_offset)
754 gen_mov_reg_FCC1(dst, src, fcc_offset);
757 // 2: !FCC0 & FCC1
758 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
759 unsigned int fcc_offset)
761 gen_mov_reg_FCC0(dst, src, fcc_offset);
762 tcg_gen_xori_tl(dst, dst, 0x1);
763 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
764 tcg_gen_and_tl(dst, dst, cpu_tmp0);
767 // 3: FCC0 & FCC1
768 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
769 unsigned int fcc_offset)
771 gen_mov_reg_FCC0(dst, src, fcc_offset);
772 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
773 tcg_gen_and_tl(dst, dst, cpu_tmp0);
776 // 0: !(FCC0 | FCC1)
777 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
778 unsigned int fcc_offset)
780 gen_mov_reg_FCC0(dst, src, fcc_offset);
781 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
782 tcg_gen_or_tl(dst, dst, cpu_tmp0);
783 tcg_gen_xori_tl(dst, dst, 0x1);
786 // 0 or 3: !(FCC0 ^ FCC1)
787 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
788 unsigned int fcc_offset)
790 gen_mov_reg_FCC0(dst, src, fcc_offset);
791 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
792 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
793 tcg_gen_xori_tl(dst, dst, 0x1);
796 // 0 or 2: !FCC0
797 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
798 unsigned int fcc_offset)
800 gen_mov_reg_FCC0(dst, src, fcc_offset);
801 tcg_gen_xori_tl(dst, dst, 0x1);
804 // !1: !(FCC0 & !FCC1)
805 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
806 unsigned int fcc_offset)
808 gen_mov_reg_FCC0(dst, src, fcc_offset);
809 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
810 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
811 tcg_gen_and_tl(dst, dst, cpu_tmp0);
812 tcg_gen_xori_tl(dst, dst, 0x1);
815 // 0 or 1: !FCC1
816 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
817 unsigned int fcc_offset)
819 gen_mov_reg_FCC1(dst, src, fcc_offset);
820 tcg_gen_xori_tl(dst, dst, 0x1);
823 // !2: !(!FCC0 & FCC1)
824 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
825 unsigned int fcc_offset)
827 gen_mov_reg_FCC0(dst, src, fcc_offset);
828 tcg_gen_xori_tl(dst, dst, 0x1);
829 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
830 tcg_gen_and_tl(dst, dst, cpu_tmp0);
831 tcg_gen_xori_tl(dst, dst, 0x1);
834 // !3: !(FCC0 & FCC1)
835 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
836 unsigned int fcc_offset)
838 gen_mov_reg_FCC0(dst, src, fcc_offset);
839 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
840 tcg_gen_and_tl(dst, dst, cpu_tmp0);
841 tcg_gen_xori_tl(dst, dst, 0x1);
/* End the TB with a two-way static branch: continue at pc1 when r_cond
   is non-zero, otherwise at pc2.  Each successor uses its own goto_tb
   chaining slot (0 and 1).  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
/* Annulling conditional branch (annul bit set).  When r_cond is
   non-zero (branch taken) execution continues into the delay slot at
   pc2 and then jumps to the target pc1; when zero, the delay slot is
   annulled and execution resumes at pc2 + 4.  Ends the TB either way.  */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
/* Emit code that sets cpu_npc to npc1 when r_cond is non-zero, else to
   npc2.  Used to materialize a deferred JUMP_PC decision at runtime.  */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
/* Resolves a pending JUMP_PC into cpu_npc (via the deferred condition
   in 'cond') and downgrades npc to DYNAMIC_PC.  */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
/* Write the translator's symbolic npc back to cpu_npc: a JUMP_PC is
   resolved through the deferred condition, a known-constant npc is
   stored directly, and DYNAMIC_PC means cpu_npc is already current.  */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
/* Synchronize pc, npc and the condition codes into the CPU state, so a
   helper call or exception observes consistent architectural state.  */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
/* Advance pc to npc (the delay-slot step).  A symbolic npc (JUMP_PC or
   DYNAMIC_PC) must first be materialized in cpu_npc, after which pc is
   only known dynamically; a constant npc just updates dc->pc.  */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
937 static inline void gen_op_next_insn(void)
939 tcg_gen_mov_tl(cpu_pc, cpu_npc);
940 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer condition 'cond' (the 4-bit Bicc/Tcc condition
   field) into r_dst as 0/1.  On sparc64, cc selects xcc (cc != 0)
   versus icc.  Any lazily-deferred flag computation is flushed first
   via gen_helper_compute_psr so the PSR bits read by the eval helpers
   are valid.  */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* Force pending condition codes to be materialized.  */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate floating-point condition 'cond' on FSR condition-code field
   'cc' into r_dst as 0/1.  'offset' is the bit distance of the chosen
   fcc field from fcc0 — the gen_op_eval_fb* helpers add it to the fcc0
   shift.  NOTE(review): the "32 - 10" style constants presumably place
   fcc1/2/3 at FSR bits 32/34/36 with FSR_FCC0_SHIFT == 10; confirm
   against the FSR layout in cpu.h.  */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1088 #ifdef TARGET_SPARC64
1089 // Inverted logic
1090 static const int gen_tcg_cond_reg[8] = {
1092 TCG_COND_NE,
1093 TCG_COND_GT,
1094 TCG_COND_GE,
1096 TCG_COND_EQ,
1097 TCG_COND_LE,
1098 TCG_COND_LT,
1101 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1103 int l1;
1105 l1 = gen_new_label();
1106 tcg_gen_movi_tl(r_dst, 0);
1107 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1108 tcg_gen_movi_tl(r_dst, 1);
1109 gen_set_label(l1);
1111 #endif
/* XXX: potentially incorrect if dynamic npc */
/* Translate Bicc: branch on integer condition codes.  cond 0x0 is
   branch-never (the annul bit additionally skips the delay slot) and
   cond 0x8 is branch-always.  A real conditional branch with the annul
   bit executes the delay slot only on the taken path (gen_branch_a
   ends the TB); without annul, the decision is deferred via jump_pc[]
   and npc = JUMP_PC so the delay slot can be translated first.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul bit: skip the delay slot too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul bit: the target replaces the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the decision until after the delay slot */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
/* Translate FBfcc: branch on floating-point condition codes.  Same
   structure as do_branch, but the condition is evaluated from the FSR
   fcc field via gen_fcond.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul bit: skip the delay slot too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul bit: the target replaces the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the decision until after the delay slot */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1195 #ifdef TARGET_SPARC64
1196 /* XXX: potentially incorrect if dynamic npc */
1197 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1198 TCGv r_cond, TCGv r_reg)
1200 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1201 target_ulong target = dc->pc + offset;
1203 flush_cond(dc, r_cond);
1204 gen_cond_reg(r_cond, cond, r_reg);
1205 if (a) {
1206 gen_branch_a(dc, target, dc->npc, r_cond);
1207 dc->is_br = 1;
1208 } else {
1209 dc->pc = dc->npc;
1210 dc->jump_pc[0] = target;
1211 dc->jump_pc[1] = dc->npc + 4;
1212 dc->npc = JUMP_PC;
1216 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1218 switch (fccno) {
1219 case 0:
1220 gen_helper_fcmps(r_rs1, r_rs2);
1221 break;
1222 case 1:
1223 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1224 break;
1225 case 2:
1226 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1227 break;
1228 case 3:
1229 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1230 break;
/* Double-precision FP compare into %fcc<fccno>; operands are the
   implicit DT0/DT1 staging registers loaded by the caller. */
static inline void gen_op_fcmpd(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpd();
    } else if (fccno == 1) {
        gen_helper_fcmpd_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpd_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpd_fcc3();
    }
}
/* Quad-precision FP compare into %fcc<fccno>; operands come from the
   implicit QT0/QT1 staging registers loaded by the caller. */
static inline void gen_op_fcmpq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpq();
    } else if (fccno == 1) {
        gen_helper_fcmpq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpq_fcc3();
    }
}
1270 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1272 switch (fccno) {
1273 case 0:
1274 gen_helper_fcmpes(r_rs1, r_rs2);
1275 break;
1276 case 1:
1277 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1278 break;
1279 case 2:
1280 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1281 break;
1282 case 3:
1283 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1284 break;
/* Double-precision compare-with-exception (FCMPEd) into %fcc<fccno>;
   operands are the implicit DT0/DT1 staging registers. */
static inline void gen_op_fcmped(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmped();
    } else if (fccno == 1) {
        gen_helper_fcmped_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmped_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmped_fcc3();
    }
}
/* Quad-precision compare-with-exception (FCMPEq) into %fcc<fccno>;
   operands are the implicit QT0/QT1 staging registers. */
static inline void gen_op_fcmpeq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpeq();
    } else if (fccno == 1) {
        gen_helper_fcmpeq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpeq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpeq_fcc3();
    }
}
1324 #else
/* Pre-V9: a single %fcc field exists, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}
/* Pre-V9 double compare; fccno ignored, operands in DT0/DT1. */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* Pre-V9 quad compare; fccno ignored, operands in QT0/QT1. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
/* Pre-V9 single compare-with-exception; fccno ignored. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}
/* Pre-V9 double compare-with-exception; fccno ignored, operands in DT0/DT1. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* Pre-V9 quad compare-with-exception; fccno ignored, operands in QT0/QT1. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1355 #endif
/* Raise an immediate FP exception: replace the FTT field of %fsr with
   fsr_flags, then raise TT_FP_EXCP.  The fsr update must precede the
   raise so the trap handler observes the new trap type. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
/* If the FPU is disabled, emit a TT_NFPU_INSN trap and end the TB.
   Returns 1 when the trap was emitted (caller must skip the insn),
   0 otherwise.  User-mode emulation always has the FPU enabled, so
   the check is compiled out there. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* flush pc/npc/condition state so the trap sees a precise state */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
/* Clear the accumulated IEEE exception (cexc) and FTT fields of %fsr. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
/* Reset softfloat's pending exception flags before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1395 /* asi moves */
1396 #ifdef TARGET_SPARC64
1397 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1399 int asi;
1400 TCGv_i32 r_asi;
1402 if (IS_IMM) {
1403 r_asi = tcg_temp_new_i32();
1404 tcg_gen_mov_i32(r_asi, cpu_asi);
1405 } else {
1406 asi = GET_FIELD(insn, 19, 26);
1407 r_asi = tcg_const_i32(asi);
1409 return r_asi;
/* V9: load `size` bytes (optionally sign-extended) from addr in the
   given alternate address space into dst, via the ld_asi helper. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9: store `size` bytes of src to addr in the given alternate space. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9: FP load from an alternate space; the helper writes the FP
   register file directly, so only the destination number rd is passed. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9: FP store to an alternate space; the helper reads FP register rd
   directly from the register file. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9 SWAPA: atomically (at translation granularity) exchange a 32-bit
   word at addr in the alternate space with dst.  The old memory value
   is staged in cpu_tmp64 and copied into dst after the store. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    /* store the register value before overwriting dst with the load */
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* V9 LDDA: load a doubleword pair from an alternate space.  The helper
   writes registers rd/rd+1 itself, so `hi` is unused here. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
/* V9 STDA: store the register pair {hi, rd+1} as one 64-bit value to an
   alternate space.  rd+1 is fetched into cpu_tmp0 and concatenated with
   hi into cpu_tmp64 before the store. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9 CASA: 32-bit compare-and-swap in an alternate space.  Register rd
   supplies the compare value (r_val1); val2 is the swap value; the old
   memory word is returned in dst. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
/* V9 CASXA: 64-bit compare-and-swap in an alternate space.  The compare
   value from register rd is staged in cpu_tmp64. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1527 #elif !defined(CONFIG_USER_ONLY)
1529 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1530 int sign)
1532 TCGv_i32 r_asi, r_size, r_sign;
1534 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1535 r_size = tcg_const_i32(size);
1536 r_sign = tcg_const_i32(sign);
1537 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1538 tcg_temp_free(r_sign);
1539 tcg_temp_free(r_size);
1540 tcg_temp_free(r_asi);
1541 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1544 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1546 TCGv_i32 r_asi, r_size;
1548 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1549 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1550 r_size = tcg_const_i32(size);
1551 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1552 tcg_temp_free(r_size);
1553 tcg_temp_free(r_asi);
1556 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1558 TCGv_i32 r_asi, r_size, r_sign;
1559 TCGv_i64 r_val;
1561 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1562 r_size = tcg_const_i32(4);
1563 r_sign = tcg_const_i32(0);
1564 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1565 tcg_temp_free(r_sign);
1566 r_val = tcg_temp_new_i64();
1567 tcg_gen_extu_tl_i64(r_val, dst);
1568 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1569 tcg_temp_free_i64(r_val);
1570 tcg_temp_free(r_size);
1571 tcg_temp_free(r_asi);
1572 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1575 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1577 TCGv_i32 r_asi, r_size, r_sign;
1579 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1580 r_size = tcg_const_i32(8);
1581 r_sign = tcg_const_i32(0);
1582 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1583 tcg_temp_free(r_sign);
1584 tcg_temp_free(r_size);
1585 tcg_temp_free(r_asi);
1586 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1587 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1588 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1589 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1590 gen_movl_TN_reg(rd, hi);
1593 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1595 TCGv_i32 r_asi, r_size;
1597 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1598 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1599 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1600 r_size = tcg_const_i32(8);
1601 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1602 tcg_temp_free(r_size);
1603 tcg_temp_free(r_asi);
1605 #endif
1607 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at addr (alternate space) into dst, then store
   0xff to the same location.  Built from a load followed by a store, so
   it is not atomic with respect to other vCPUs. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
1623 #endif
/* Resolve the rs1 operand of insn to a TCGv:
   - %g0 reads as a fresh zero constant (leaked, see XXX),
   - %g1..%g7 map to the global TCGv array,
   - window registers are loaded from regwptr into the caller's `def`.
   The returned value may alias `def` or a global; callers must not
   assume ownership. */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
/* Resolve the second operand of insn: either the sign-extended simm13
   immediate (IS_IMM) or register rs2, with the same %g0/global/window
   handling as get_src1.  Constants created here are leaked (see XXX). */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
1663 #ifdef TARGET_SPARC64
/* Compute a pointer to the current trap-state entry:
   r_tsptr = env->ts[env->tl & MAXTL_MASK].
   NOTE(review): the `cpu_env` parameter shadows the file-scope global of
   the same name — apparently intentional, but worth confirming.  The
   offsetof uses mix CPUSPARCState and CPUState; presumably aliases here —
   verify against cpu.h. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv r_tl = tcg_temp_new();

    /* load env->tl into r_tl */
    {
        TCGv_i32 r_tl_tmp = tcg_temp_new_i32();
        tcg_gen_ld_i32(r_tl_tmp, cpu_env, offsetof(CPUSPARCState, tl));
        tcg_gen_ext_i32_tl(r_tl, r_tl_tmp);
        tcg_temp_free_i32(r_tl_tmp);
    }

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_tl(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_tl(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl);

    tcg_temp_free(r_tl);
}
1688 #endif
/* Bail out of disas_sparc_insn when the CPU model lacks the named IU or
   FPU feature.  Wrapped in do { } while (0) so each macro expands to a
   single statement and cannot mis-bind a following `else` at the call
   site (the bare `if` form had a dangling-else hazard). */
#define CHECK_IU_FEATURE(dc, FEATURE)                              \
    do {                                                           \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))      \
            goto illegal_insn;                                     \
    } while (0)
#define CHECK_FPU_FEATURE(dc, FEATURE)                             \
    do {                                                           \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))      \
            goto nfpu_insn;                                        \
    } while (0)
1697 /* before an instruction, dc->pc must be static */
1698 static void disas_sparc_insn(DisasContext * dc)
1700 unsigned int insn, opc, rs1, rs2, rd;
1701 target_long simm;
1703 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1704 tcg_gen_debug_insn_start(dc->pc);
1705 insn = ldl_code(dc->pc);
1706 opc = GET_FIELD(insn, 0, 1);
1708 rd = GET_FIELD(insn, 2, 6);
1710 cpu_src1 = tcg_temp_new(); // const
1711 cpu_src2 = tcg_temp_new(); // const
1713 switch (opc) {
1714 case 0: /* branches/sethi */
1716 unsigned int xop = GET_FIELD(insn, 7, 9);
1717 int32_t target;
1718 switch (xop) {
1719 #ifdef TARGET_SPARC64
1720 case 0x1: /* V9 BPcc */
1722 int cc;
1724 target = GET_FIELD_SP(insn, 0, 18);
1725 target = sign_extend(target, 18);
1726 target <<= 2;
1727 cc = GET_FIELD_SP(insn, 20, 21);
1728 if (cc == 0)
1729 do_branch(dc, target, insn, 0, cpu_cond);
1730 else if (cc == 2)
1731 do_branch(dc, target, insn, 1, cpu_cond);
1732 else
1733 goto illegal_insn;
1734 goto jmp_insn;
1736 case 0x3: /* V9 BPr */
1738 target = GET_FIELD_SP(insn, 0, 13) |
1739 (GET_FIELD_SP(insn, 20, 21) << 14);
1740 target = sign_extend(target, 16);
1741 target <<= 2;
1742 cpu_src1 = get_src1(insn, cpu_src1);
1743 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1744 goto jmp_insn;
1746 case 0x5: /* V9 FBPcc */
1748 int cc = GET_FIELD_SP(insn, 20, 21);
1749 if (gen_trap_ifnofpu(dc, cpu_cond))
1750 goto jmp_insn;
1751 target = GET_FIELD_SP(insn, 0, 18);
1752 target = sign_extend(target, 19);
1753 target <<= 2;
1754 do_fbranch(dc, target, insn, cc, cpu_cond);
1755 goto jmp_insn;
1757 #else
1758 case 0x7: /* CBN+x */
1760 goto ncp_insn;
1762 #endif
1763 case 0x2: /* BN+x */
1765 target = GET_FIELD(insn, 10, 31);
1766 target = sign_extend(target, 22);
1767 target <<= 2;
1768 do_branch(dc, target, insn, 0, cpu_cond);
1769 goto jmp_insn;
1771 case 0x6: /* FBN+x */
1773 if (gen_trap_ifnofpu(dc, cpu_cond))
1774 goto jmp_insn;
1775 target = GET_FIELD(insn, 10, 31);
1776 target = sign_extend(target, 22);
1777 target <<= 2;
1778 do_fbranch(dc, target, insn, 0, cpu_cond);
1779 goto jmp_insn;
1781 case 0x4: /* SETHI */
1782 if (rd) { // nop
1783 uint32_t value = GET_FIELD(insn, 10, 31);
1784 TCGv r_const;
1786 r_const = tcg_const_tl(value << 10);
1787 gen_movl_TN_reg(rd, r_const);
1788 tcg_temp_free(r_const);
1790 break;
1791 case 0x0: /* UNIMPL */
1792 default:
1793 goto illegal_insn;
1795 break;
1797 break;
1798 case 1: /*CALL*/
1800 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1801 TCGv r_const;
1803 r_const = tcg_const_tl(dc->pc);
1804 gen_movl_TN_reg(15, r_const);
1805 tcg_temp_free(r_const);
1806 target += dc->pc;
1807 gen_mov_pc_npc(dc, cpu_cond);
1808 dc->npc = target;
1810 goto jmp_insn;
1811 case 2: /* FPU & Logical Operations */
1813 unsigned int xop = GET_FIELD(insn, 7, 12);
1814 if (xop == 0x3a) { /* generate trap */
1815 int cond;
1817 cpu_src1 = get_src1(insn, cpu_src1);
1818 if (IS_IMM) {
1819 rs2 = GET_FIELD(insn, 25, 31);
1820 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1821 } else {
1822 rs2 = GET_FIELD(insn, 27, 31);
1823 if (rs2 != 0) {
1824 gen_movl_reg_TN(rs2, cpu_src2);
1825 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1826 } else
1827 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1829 cond = GET_FIELD(insn, 3, 6);
1830 if (cond == 0x8) {
1831 save_state(dc, cpu_cond);
1832 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1833 supervisor(dc))
1834 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1835 else
1836 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1837 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1838 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1839 gen_helper_raise_exception(cpu_tmp32);
1840 } else if (cond != 0) {
1841 TCGv r_cond = tcg_temp_new();
1842 int l1;
1843 #ifdef TARGET_SPARC64
1844 /* V9 icc/xcc */
1845 int cc = GET_FIELD_SP(insn, 11, 12);
1847 save_state(dc, cpu_cond);
1848 if (cc == 0)
1849 gen_cond(r_cond, 0, cond, dc);
1850 else if (cc == 2)
1851 gen_cond(r_cond, 1, cond, dc);
1852 else
1853 goto illegal_insn;
1854 #else
1855 save_state(dc, cpu_cond);
1856 gen_cond(r_cond, 0, cond, dc);
1857 #endif
1858 l1 = gen_new_label();
1859 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1861 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1862 supervisor(dc))
1863 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1864 else
1865 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1866 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1867 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1868 gen_helper_raise_exception(cpu_tmp32);
1870 gen_set_label(l1);
1871 tcg_temp_free(r_cond);
1873 gen_op_next_insn();
1874 tcg_gen_exit_tb(0);
1875 dc->is_br = 1;
1876 goto jmp_insn;
1877 } else if (xop == 0x28) {
1878 rs1 = GET_FIELD(insn, 13, 17);
1879 switch(rs1) {
1880 case 0: /* rdy */
1881 #ifndef TARGET_SPARC64
1882 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1883 manual, rdy on the microSPARC
1884 II */
1885 case 0x0f: /* stbar in the SPARCv8 manual,
1886 rdy on the microSPARC II */
1887 case 0x10 ... 0x1f: /* implementation-dependent in the
1888 SPARCv8 manual, rdy on the
1889 microSPARC II */
1890 #endif
1891 gen_movl_TN_reg(rd, cpu_y);
1892 break;
1893 #ifdef TARGET_SPARC64
1894 case 0x2: /* V9 rdccr */
1895 gen_helper_compute_psr();
1896 gen_helper_rdccr(cpu_dst);
1897 gen_movl_TN_reg(rd, cpu_dst);
1898 break;
1899 case 0x3: /* V9 rdasi */
1900 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1901 gen_movl_TN_reg(rd, cpu_dst);
1902 break;
1903 case 0x4: /* V9 rdtick */
1905 TCGv_ptr r_tickptr;
1907 r_tickptr = tcg_temp_new_ptr();
1908 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1909 offsetof(CPUState, tick));
1910 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1911 tcg_temp_free_ptr(r_tickptr);
1912 gen_movl_TN_reg(rd, cpu_dst);
1914 break;
1915 case 0x5: /* V9 rdpc */
1917 TCGv r_const;
1919 r_const = tcg_const_tl(dc->pc);
1920 gen_movl_TN_reg(rd, r_const);
1921 tcg_temp_free(r_const);
1923 break;
1924 case 0x6: /* V9 rdfprs */
1925 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1926 gen_movl_TN_reg(rd, cpu_dst);
1927 break;
1928 case 0xf: /* V9 membar */
1929 break; /* no effect */
1930 case 0x13: /* Graphics Status */
1931 if (gen_trap_ifnofpu(dc, cpu_cond))
1932 goto jmp_insn;
1933 gen_movl_TN_reg(rd, cpu_gsr);
1934 break;
1935 case 0x16: /* Softint */
1936 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1937 gen_movl_TN_reg(rd, cpu_dst);
1938 break;
1939 case 0x17: /* Tick compare */
1940 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1941 break;
1942 case 0x18: /* System tick */
1944 TCGv_ptr r_tickptr;
1946 r_tickptr = tcg_temp_new_ptr();
1947 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1948 offsetof(CPUState, stick));
1949 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1950 tcg_temp_free_ptr(r_tickptr);
1951 gen_movl_TN_reg(rd, cpu_dst);
1953 break;
1954 case 0x19: /* System tick compare */
1955 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1956 break;
1957 case 0x10: /* Performance Control */
1958 case 0x11: /* Performance Instrumentation Counter */
1959 case 0x12: /* Dispatch Control */
1960 case 0x14: /* Softint set, WO */
1961 case 0x15: /* Softint clear, WO */
1962 #endif
1963 default:
1964 goto illegal_insn;
1966 #if !defined(CONFIG_USER_ONLY)
1967 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1968 #ifndef TARGET_SPARC64
1969 if (!supervisor(dc))
1970 goto priv_insn;
1971 gen_helper_compute_psr();
1972 dc->cc_op = CC_OP_FLAGS;
1973 gen_helper_rdpsr(cpu_dst);
1974 #else
1975 CHECK_IU_FEATURE(dc, HYPV);
1976 if (!hypervisor(dc))
1977 goto priv_insn;
1978 rs1 = GET_FIELD(insn, 13, 17);
1979 switch (rs1) {
1980 case 0: // hpstate
1981 // gen_op_rdhpstate();
1982 break;
1983 case 1: // htstate
1984 // gen_op_rdhtstate();
1985 break;
1986 case 3: // hintp
1987 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1988 break;
1989 case 5: // htba
1990 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1991 break;
1992 case 6: // hver
1993 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1994 break;
1995 case 31: // hstick_cmpr
1996 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
1997 break;
1998 default:
1999 goto illegal_insn;
2001 #endif
2002 gen_movl_TN_reg(rd, cpu_dst);
2003 break;
2004 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2005 if (!supervisor(dc))
2006 goto priv_insn;
2007 #ifdef TARGET_SPARC64
2008 rs1 = GET_FIELD(insn, 13, 17);
2009 switch (rs1) {
2010 case 0: // tpc
2012 TCGv_ptr r_tsptr;
2014 r_tsptr = tcg_temp_new_ptr();
2015 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2016 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2017 offsetof(trap_state, tpc));
2018 tcg_temp_free_ptr(r_tsptr);
2020 break;
2021 case 1: // tnpc
2023 TCGv_ptr r_tsptr;
2025 r_tsptr = tcg_temp_new_ptr();
2026 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2027 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2028 offsetof(trap_state, tnpc));
2029 tcg_temp_free_ptr(r_tsptr);
2031 break;
2032 case 2: // tstate
2034 TCGv_ptr r_tsptr;
2036 r_tsptr = tcg_temp_new_ptr();
2037 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2038 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2039 offsetof(trap_state, tstate));
2040 tcg_temp_free_ptr(r_tsptr);
2042 break;
2043 case 3: // tt
2045 TCGv_ptr r_tsptr;
2047 r_tsptr = tcg_temp_new_ptr();
2048 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2049 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2050 offsetof(trap_state, tt));
2051 tcg_temp_free_ptr(r_tsptr);
2052 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2054 break;
2055 case 4: // tick
2057 TCGv_ptr r_tickptr;
2059 r_tickptr = tcg_temp_new_ptr();
2060 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2061 offsetof(CPUState, tick));
2062 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2063 gen_movl_TN_reg(rd, cpu_tmp0);
2064 tcg_temp_free_ptr(r_tickptr);
2066 break;
2067 case 5: // tba
2068 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2069 break;
2070 case 6: // pstate
2071 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2072 offsetof(CPUSPARCState, pstate));
2073 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2074 break;
2075 case 7: // tl
2076 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2077 offsetof(CPUSPARCState, tl));
2078 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2079 break;
2080 case 8: // pil
2081 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2082 offsetof(CPUSPARCState, psrpil));
2083 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2084 break;
2085 case 9: // cwp
2086 gen_helper_rdcwp(cpu_tmp0);
2087 break;
2088 case 10: // cansave
2089 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2090 offsetof(CPUSPARCState, cansave));
2091 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2092 break;
2093 case 11: // canrestore
2094 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2095 offsetof(CPUSPARCState, canrestore));
2096 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2097 break;
2098 case 12: // cleanwin
2099 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2100 offsetof(CPUSPARCState, cleanwin));
2101 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2102 break;
2103 case 13: // otherwin
2104 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2105 offsetof(CPUSPARCState, otherwin));
2106 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2107 break;
2108 case 14: // wstate
2109 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2110 offsetof(CPUSPARCState, wstate));
2111 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2112 break;
2113 case 16: // UA2005 gl
2114 CHECK_IU_FEATURE(dc, GL);
2115 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2116 offsetof(CPUSPARCState, gl));
2117 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2118 break;
2119 case 26: // UA2005 strand status
2120 CHECK_IU_FEATURE(dc, HYPV);
2121 if (!hypervisor(dc))
2122 goto priv_insn;
2123 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2124 break;
2125 case 31: // ver
2126 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2127 break;
2128 case 15: // fq
2129 default:
2130 goto illegal_insn;
2132 #else
2133 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2134 #endif
2135 gen_movl_TN_reg(rd, cpu_tmp0);
2136 break;
2137 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2138 #ifdef TARGET_SPARC64
2139 save_state(dc, cpu_cond);
2140 gen_helper_flushw();
2141 #else
2142 if (!supervisor(dc))
2143 goto priv_insn;
2144 gen_movl_TN_reg(rd, cpu_tbr);
2145 #endif
2146 break;
2147 #endif
2148 } else if (xop == 0x34) { /* FPU Operations */
2149 if (gen_trap_ifnofpu(dc, cpu_cond))
2150 goto jmp_insn;
2151 gen_op_clear_ieee_excp_and_FTT();
2152 rs1 = GET_FIELD(insn, 13, 17);
2153 rs2 = GET_FIELD(insn, 27, 31);
2154 xop = GET_FIELD(insn, 18, 26);
2155 switch (xop) {
2156 case 0x1: /* fmovs */
2157 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2158 break;
2159 case 0x5: /* fnegs */
2160 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2161 break;
2162 case 0x9: /* fabss */
2163 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2164 break;
2165 case 0x29: /* fsqrts */
2166 CHECK_FPU_FEATURE(dc, FSQRT);
2167 gen_clear_float_exceptions();
2168 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2169 gen_helper_check_ieee_exceptions();
2170 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2171 break;
2172 case 0x2a: /* fsqrtd */
2173 CHECK_FPU_FEATURE(dc, FSQRT);
2174 gen_op_load_fpr_DT1(DFPREG(rs2));
2175 gen_clear_float_exceptions();
2176 gen_helper_fsqrtd();
2177 gen_helper_check_ieee_exceptions();
2178 gen_op_store_DT0_fpr(DFPREG(rd));
2179 break;
2180 case 0x2b: /* fsqrtq */
2181 CHECK_FPU_FEATURE(dc, FLOAT128);
2182 gen_op_load_fpr_QT1(QFPREG(rs2));
2183 gen_clear_float_exceptions();
2184 gen_helper_fsqrtq();
2185 gen_helper_check_ieee_exceptions();
2186 gen_op_store_QT0_fpr(QFPREG(rd));
2187 break;
2188 case 0x41: /* fadds */
2189 gen_clear_float_exceptions();
2190 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2191 gen_helper_check_ieee_exceptions();
2192 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2193 break;
2194 case 0x42: /* faddd */
2195 gen_op_load_fpr_DT0(DFPREG(rs1));
2196 gen_op_load_fpr_DT1(DFPREG(rs2));
2197 gen_clear_float_exceptions();
2198 gen_helper_faddd();
2199 gen_helper_check_ieee_exceptions();
2200 gen_op_store_DT0_fpr(DFPREG(rd));
2201 break;
2202 case 0x43: /* faddq */
2203 CHECK_FPU_FEATURE(dc, FLOAT128);
2204 gen_op_load_fpr_QT0(QFPREG(rs1));
2205 gen_op_load_fpr_QT1(QFPREG(rs2));
2206 gen_clear_float_exceptions();
2207 gen_helper_faddq();
2208 gen_helper_check_ieee_exceptions();
2209 gen_op_store_QT0_fpr(QFPREG(rd));
2210 break;
2211 case 0x45: /* fsubs */
2212 gen_clear_float_exceptions();
2213 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2214 gen_helper_check_ieee_exceptions();
2215 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2216 break;
2217 case 0x46: /* fsubd */
2218 gen_op_load_fpr_DT0(DFPREG(rs1));
2219 gen_op_load_fpr_DT1(DFPREG(rs2));
2220 gen_clear_float_exceptions();
2221 gen_helper_fsubd();
2222 gen_helper_check_ieee_exceptions();
2223 gen_op_store_DT0_fpr(DFPREG(rd));
2224 break;
2225 case 0x47: /* fsubq */
2226 CHECK_FPU_FEATURE(dc, FLOAT128);
2227 gen_op_load_fpr_QT0(QFPREG(rs1));
2228 gen_op_load_fpr_QT1(QFPREG(rs2));
2229 gen_clear_float_exceptions();
2230 gen_helper_fsubq();
2231 gen_helper_check_ieee_exceptions();
2232 gen_op_store_QT0_fpr(QFPREG(rd));
2233 break;
2234 case 0x49: /* fmuls */
2235 CHECK_FPU_FEATURE(dc, FMUL);
2236 gen_clear_float_exceptions();
2237 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2238 gen_helper_check_ieee_exceptions();
2239 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2240 break;
2241 case 0x4a: /* fmuld */
2242 CHECK_FPU_FEATURE(dc, FMUL);
2243 gen_op_load_fpr_DT0(DFPREG(rs1));
2244 gen_op_load_fpr_DT1(DFPREG(rs2));
2245 gen_clear_float_exceptions();
2246 gen_helper_fmuld();
2247 gen_helper_check_ieee_exceptions();
2248 gen_op_store_DT0_fpr(DFPREG(rd));
2249 break;
2250 case 0x4b: /* fmulq */
2251 CHECK_FPU_FEATURE(dc, FLOAT128);
2252 CHECK_FPU_FEATURE(dc, FMUL);
2253 gen_op_load_fpr_QT0(QFPREG(rs1));
2254 gen_op_load_fpr_QT1(QFPREG(rs2));
2255 gen_clear_float_exceptions();
2256 gen_helper_fmulq();
2257 gen_helper_check_ieee_exceptions();
2258 gen_op_store_QT0_fpr(QFPREG(rd));
2259 break;
2260 case 0x4d: /* fdivs */
2261 gen_clear_float_exceptions();
2262 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2263 gen_helper_check_ieee_exceptions();
2264 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2265 break;
2266 case 0x4e: /* fdivd */
2267 gen_op_load_fpr_DT0(DFPREG(rs1));
2268 gen_op_load_fpr_DT1(DFPREG(rs2));
2269 gen_clear_float_exceptions();
2270 gen_helper_fdivd();
2271 gen_helper_check_ieee_exceptions();
2272 gen_op_store_DT0_fpr(DFPREG(rd));
2273 break;
2274 case 0x4f: /* fdivq */
2275 CHECK_FPU_FEATURE(dc, FLOAT128);
2276 gen_op_load_fpr_QT0(QFPREG(rs1));
2277 gen_op_load_fpr_QT1(QFPREG(rs2));
2278 gen_clear_float_exceptions();
2279 gen_helper_fdivq();
2280 gen_helper_check_ieee_exceptions();
2281 gen_op_store_QT0_fpr(QFPREG(rd));
2282 break;
2283 case 0x69: /* fsmuld */
2284 CHECK_FPU_FEATURE(dc, FSMULD);
2285 gen_clear_float_exceptions();
2286 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2287 gen_helper_check_ieee_exceptions();
2288 gen_op_store_DT0_fpr(DFPREG(rd));
2289 break;
2290 case 0x6e: /* fdmulq */
2291 CHECK_FPU_FEATURE(dc, FLOAT128);
2292 gen_op_load_fpr_DT0(DFPREG(rs1));
2293 gen_op_load_fpr_DT1(DFPREG(rs2));
2294 gen_clear_float_exceptions();
2295 gen_helper_fdmulq();
2296 gen_helper_check_ieee_exceptions();
2297 gen_op_store_QT0_fpr(QFPREG(rd));
2298 break;
2299 case 0xc4: /* fitos */
2300 gen_clear_float_exceptions();
2301 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2302 gen_helper_check_ieee_exceptions();
2303 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2304 break;
2305 case 0xc6: /* fdtos */
2306 gen_op_load_fpr_DT1(DFPREG(rs2));
2307 gen_clear_float_exceptions();
2308 gen_helper_fdtos(cpu_tmp32);
2309 gen_helper_check_ieee_exceptions();
2310 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2311 break;
2312 case 0xc7: /* fqtos */
2313 CHECK_FPU_FEATURE(dc, FLOAT128);
2314 gen_op_load_fpr_QT1(QFPREG(rs2));
2315 gen_clear_float_exceptions();
2316 gen_helper_fqtos(cpu_tmp32);
2317 gen_helper_check_ieee_exceptions();
2318 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2319 break;
2320 case 0xc8: /* fitod */
2321 gen_helper_fitod(cpu_fpr[rs2]);
2322 gen_op_store_DT0_fpr(DFPREG(rd));
2323 break;
2324 case 0xc9: /* fstod */
2325 gen_helper_fstod(cpu_fpr[rs2]);
2326 gen_op_store_DT0_fpr(DFPREG(rd));
2327 break;
2328 case 0xcb: /* fqtod */
2329 CHECK_FPU_FEATURE(dc, FLOAT128);
2330 gen_op_load_fpr_QT1(QFPREG(rs2));
2331 gen_clear_float_exceptions();
2332 gen_helper_fqtod();
2333 gen_helper_check_ieee_exceptions();
2334 gen_op_store_DT0_fpr(DFPREG(rd));
2335 break;
2336 case 0xcc: /* fitoq */
2337 CHECK_FPU_FEATURE(dc, FLOAT128);
2338 gen_helper_fitoq(cpu_fpr[rs2]);
2339 gen_op_store_QT0_fpr(QFPREG(rd));
2340 break;
2341 case 0xcd: /* fstoq */
2342 CHECK_FPU_FEATURE(dc, FLOAT128);
2343 gen_helper_fstoq(cpu_fpr[rs2]);
2344 gen_op_store_QT0_fpr(QFPREG(rd));
2345 break;
2346 case 0xce: /* fdtoq */
2347 CHECK_FPU_FEATURE(dc, FLOAT128);
2348 gen_op_load_fpr_DT1(DFPREG(rs2));
2349 gen_helper_fdtoq();
2350 gen_op_store_QT0_fpr(QFPREG(rd));
2351 break;
2352 case 0xd1: /* fstoi */
2353 gen_clear_float_exceptions();
2354 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2355 gen_helper_check_ieee_exceptions();
2356 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2357 break;
2358 case 0xd2: /* fdtoi */
2359 gen_op_load_fpr_DT1(DFPREG(rs2));
2360 gen_clear_float_exceptions();
2361 gen_helper_fdtoi(cpu_tmp32);
2362 gen_helper_check_ieee_exceptions();
2363 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2364 break;
2365 case 0xd3: /* fqtoi */
2366 CHECK_FPU_FEATURE(dc, FLOAT128);
2367 gen_op_load_fpr_QT1(QFPREG(rs2));
2368 gen_clear_float_exceptions();
2369 gen_helper_fqtoi(cpu_tmp32);
2370 gen_helper_check_ieee_exceptions();
2371 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2372 break;
2373 #ifdef TARGET_SPARC64
2374 case 0x2: /* V9 fmovd */
2375 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2376 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2377 cpu_fpr[DFPREG(rs2) + 1]);
2378 break;
2379 case 0x3: /* V9 fmovq */
2380 CHECK_FPU_FEATURE(dc, FLOAT128);
2381 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2382 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2383 cpu_fpr[QFPREG(rs2) + 1]);
2384 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2385 cpu_fpr[QFPREG(rs2) + 2]);
2386 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2387 cpu_fpr[QFPREG(rs2) + 3]);
2388 break;
2389 case 0x6: /* V9 fnegd */
2390 gen_op_load_fpr_DT1(DFPREG(rs2));
2391 gen_helper_fnegd();
2392 gen_op_store_DT0_fpr(DFPREG(rd));
2393 break;
2394 case 0x7: /* V9 fnegq */
2395 CHECK_FPU_FEATURE(dc, FLOAT128);
2396 gen_op_load_fpr_QT1(QFPREG(rs2));
2397 gen_helper_fnegq();
2398 gen_op_store_QT0_fpr(QFPREG(rd));
2399 break;
2400 case 0xa: /* V9 fabsd */
2401 gen_op_load_fpr_DT1(DFPREG(rs2));
2402 gen_helper_fabsd();
2403 gen_op_store_DT0_fpr(DFPREG(rd));
2404 break;
2405 case 0xb: /* V9 fabsq */
2406 CHECK_FPU_FEATURE(dc, FLOAT128);
2407 gen_op_load_fpr_QT1(QFPREG(rs2));
2408 gen_helper_fabsq();
2409 gen_op_store_QT0_fpr(QFPREG(rd));
2410 break;
2411 case 0x81: /* V9 fstox */
2412 gen_clear_float_exceptions();
2413 gen_helper_fstox(cpu_fpr[rs2]);
2414 gen_helper_check_ieee_exceptions();
2415 gen_op_store_DT0_fpr(DFPREG(rd));
2416 break;
2417 case 0x82: /* V9 fdtox */
2418 gen_op_load_fpr_DT1(DFPREG(rs2));
2419 gen_clear_float_exceptions();
2420 gen_helper_fdtox();
2421 gen_helper_check_ieee_exceptions();
2422 gen_op_store_DT0_fpr(DFPREG(rd));
2423 break;
2424 case 0x83: /* V9 fqtox */
2425 CHECK_FPU_FEATURE(dc, FLOAT128);
2426 gen_op_load_fpr_QT1(QFPREG(rs2));
2427 gen_clear_float_exceptions();
2428 gen_helper_fqtox();
2429 gen_helper_check_ieee_exceptions();
2430 gen_op_store_DT0_fpr(DFPREG(rd));
2431 break;
2432 case 0x84: /* V9 fxtos */
2433 gen_op_load_fpr_DT1(DFPREG(rs2));
2434 gen_clear_float_exceptions();
2435 gen_helper_fxtos(cpu_tmp32);
2436 gen_helper_check_ieee_exceptions();
2437 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2438 break;
2439 case 0x88: /* V9 fxtod */
2440 gen_op_load_fpr_DT1(DFPREG(rs2));
2441 gen_clear_float_exceptions();
2442 gen_helper_fxtod();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_DT0_fpr(DFPREG(rd));
2445 break;
2446 case 0x8c: /* V9 fxtoq */
2447 CHECK_FPU_FEATURE(dc, FLOAT128);
2448 gen_op_load_fpr_DT1(DFPREG(rs2));
2449 gen_clear_float_exceptions();
2450 gen_helper_fxtoq();
2451 gen_helper_check_ieee_exceptions();
2452 gen_op_store_QT0_fpr(QFPREG(rd));
2453 break;
2454 #endif
2455 default:
2456 goto illegal_insn;
2458 } else if (xop == 0x35) { /* FPU Operations */
2459 #ifdef TARGET_SPARC64
2460 int cond;
2461 #endif
2462 if (gen_trap_ifnofpu(dc, cpu_cond))
2463 goto jmp_insn;
2464 gen_op_clear_ieee_excp_and_FTT();
2465 rs1 = GET_FIELD(insn, 13, 17);
2466 rs2 = GET_FIELD(insn, 27, 31);
2467 xop = GET_FIELD(insn, 18, 26);
2468 #ifdef TARGET_SPARC64
2469 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2470 int l1;
2472 l1 = gen_new_label();
2473 cond = GET_FIELD_SP(insn, 14, 17);
2474 cpu_src1 = get_src1(insn, cpu_src1);
2475 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2476 0, l1);
2477 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2478 gen_set_label(l1);
2479 break;
2480 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2481 int l1;
2483 l1 = gen_new_label();
2484 cond = GET_FIELD_SP(insn, 14, 17);
2485 cpu_src1 = get_src1(insn, cpu_src1);
2486 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2487 0, l1);
2488 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2489 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2490 gen_set_label(l1);
2491 break;
2492 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2493 int l1;
2495 CHECK_FPU_FEATURE(dc, FLOAT128);
2496 l1 = gen_new_label();
2497 cond = GET_FIELD_SP(insn, 14, 17);
2498 cpu_src1 = get_src1(insn, cpu_src1);
2499 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2500 0, l1);
2501 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2502 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2503 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2504 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2505 gen_set_label(l1);
2506 break;
2508 #endif
2509 switch (xop) {
2510 #ifdef TARGET_SPARC64
2511 #define FMOVSCC(fcc) \
2513 TCGv r_cond; \
2514 int l1; \
2516 l1 = gen_new_label(); \
2517 r_cond = tcg_temp_new(); \
2518 cond = GET_FIELD_SP(insn, 14, 17); \
2519 gen_fcond(r_cond, fcc, cond); \
2520 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2521 0, l1); \
2522 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2523 gen_set_label(l1); \
2524 tcg_temp_free(r_cond); \
2526 #define FMOVDCC(fcc) \
2528 TCGv r_cond; \
2529 int l1; \
2531 l1 = gen_new_label(); \
2532 r_cond = tcg_temp_new(); \
2533 cond = GET_FIELD_SP(insn, 14, 17); \
2534 gen_fcond(r_cond, fcc, cond); \
2535 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2536 0, l1); \
2537 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2538 cpu_fpr[DFPREG(rs2)]); \
2539 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2540 cpu_fpr[DFPREG(rs2) + 1]); \
2541 gen_set_label(l1); \
2542 tcg_temp_free(r_cond); \
2544 #define FMOVQCC(fcc) \
2546 TCGv r_cond; \
2547 int l1; \
2549 l1 = gen_new_label(); \
2550 r_cond = tcg_temp_new(); \
2551 cond = GET_FIELD_SP(insn, 14, 17); \
2552 gen_fcond(r_cond, fcc, cond); \
2553 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2554 0, l1); \
2555 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2556 cpu_fpr[QFPREG(rs2)]); \
2557 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2558 cpu_fpr[QFPREG(rs2) + 1]); \
2559 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2560 cpu_fpr[QFPREG(rs2) + 2]); \
2561 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2562 cpu_fpr[QFPREG(rs2) + 3]); \
2563 gen_set_label(l1); \
2564 tcg_temp_free(r_cond); \
2566 case 0x001: /* V9 fmovscc %fcc0 */
2567 FMOVSCC(0);
2568 break;
2569 case 0x002: /* V9 fmovdcc %fcc0 */
2570 FMOVDCC(0);
2571 break;
2572 case 0x003: /* V9 fmovqcc %fcc0 */
2573 CHECK_FPU_FEATURE(dc, FLOAT128);
2574 FMOVQCC(0);
2575 break;
2576 case 0x041: /* V9 fmovscc %fcc1 */
2577 FMOVSCC(1);
2578 break;
2579 case 0x042: /* V9 fmovdcc %fcc1 */
2580 FMOVDCC(1);
2581 break;
2582 case 0x043: /* V9 fmovqcc %fcc1 */
2583 CHECK_FPU_FEATURE(dc, FLOAT128);
2584 FMOVQCC(1);
2585 break;
2586 case 0x081: /* V9 fmovscc %fcc2 */
2587 FMOVSCC(2);
2588 break;
2589 case 0x082: /* V9 fmovdcc %fcc2 */
2590 FMOVDCC(2);
2591 break;
2592 case 0x083: /* V9 fmovqcc %fcc2 */
2593 CHECK_FPU_FEATURE(dc, FLOAT128);
2594 FMOVQCC(2);
2595 break;
2596 case 0x0c1: /* V9 fmovscc %fcc3 */
2597 FMOVSCC(3);
2598 break;
2599 case 0x0c2: /* V9 fmovdcc %fcc3 */
2600 FMOVDCC(3);
2601 break;
2602 case 0x0c3: /* V9 fmovqcc %fcc3 */
2603 CHECK_FPU_FEATURE(dc, FLOAT128);
2604 FMOVQCC(3);
2605 break;
2606 #undef FMOVSCC
2607 #undef FMOVDCC
2608 #undef FMOVQCC
2609 #define FMOVSCC(icc) \
2611 TCGv r_cond; \
2612 int l1; \
2614 l1 = gen_new_label(); \
2615 r_cond = tcg_temp_new(); \
2616 cond = GET_FIELD_SP(insn, 14, 17); \
2617 gen_cond(r_cond, icc, cond, dc); \
2618 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2619 0, l1); \
2620 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2621 gen_set_label(l1); \
2622 tcg_temp_free(r_cond); \
2624 #define FMOVDCC(icc) \
2626 TCGv r_cond; \
2627 int l1; \
2629 l1 = gen_new_label(); \
2630 r_cond = tcg_temp_new(); \
2631 cond = GET_FIELD_SP(insn, 14, 17); \
2632 gen_cond(r_cond, icc, cond, dc); \
2633 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2634 0, l1); \
2635 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2636 cpu_fpr[DFPREG(rs2)]); \
2637 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2638 cpu_fpr[DFPREG(rs2) + 1]); \
2639 gen_set_label(l1); \
2640 tcg_temp_free(r_cond); \
2642 #define FMOVQCC(icc) \
2644 TCGv r_cond; \
2645 int l1; \
2647 l1 = gen_new_label(); \
2648 r_cond = tcg_temp_new(); \
2649 cond = GET_FIELD_SP(insn, 14, 17); \
2650 gen_cond(r_cond, icc, cond, dc); \
2651 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2652 0, l1); \
2653 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2654 cpu_fpr[QFPREG(rs2)]); \
2655 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2656 cpu_fpr[QFPREG(rs2) + 1]); \
2657 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2658 cpu_fpr[QFPREG(rs2) + 2]); \
2659 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2660 cpu_fpr[QFPREG(rs2) + 3]); \
2661 gen_set_label(l1); \
2662 tcg_temp_free(r_cond); \
2665 case 0x101: /* V9 fmovscc %icc */
2666 FMOVSCC(0);
2667 break;
2668 case 0x102: /* V9 fmovdcc %icc */
2669 FMOVDCC(0);
2670 case 0x103: /* V9 fmovqcc %icc */
2671 CHECK_FPU_FEATURE(dc, FLOAT128);
2672 FMOVQCC(0);
2673 break;
2674 case 0x181: /* V9 fmovscc %xcc */
2675 FMOVSCC(1);
2676 break;
2677 case 0x182: /* V9 fmovdcc %xcc */
2678 FMOVDCC(1);
2679 break;
2680 case 0x183: /* V9 fmovqcc %xcc */
2681 CHECK_FPU_FEATURE(dc, FLOAT128);
2682 FMOVQCC(1);
2683 break;
2684 #undef FMOVSCC
2685 #undef FMOVDCC
2686 #undef FMOVQCC
2687 #endif
2688 case 0x51: /* fcmps, V9 %fcc */
2689 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2690 break;
2691 case 0x52: /* fcmpd, V9 %fcc */
2692 gen_op_load_fpr_DT0(DFPREG(rs1));
2693 gen_op_load_fpr_DT1(DFPREG(rs2));
2694 gen_op_fcmpd(rd & 3);
2695 break;
2696 case 0x53: /* fcmpq, V9 %fcc */
2697 CHECK_FPU_FEATURE(dc, FLOAT128);
2698 gen_op_load_fpr_QT0(QFPREG(rs1));
2699 gen_op_load_fpr_QT1(QFPREG(rs2));
2700 gen_op_fcmpq(rd & 3);
2701 break;
2702 case 0x55: /* fcmpes, V9 %fcc */
2703 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2704 break;
2705 case 0x56: /* fcmped, V9 %fcc */
2706 gen_op_load_fpr_DT0(DFPREG(rs1));
2707 gen_op_load_fpr_DT1(DFPREG(rs2));
2708 gen_op_fcmped(rd & 3);
2709 break;
2710 case 0x57: /* fcmpeq, V9 %fcc */
2711 CHECK_FPU_FEATURE(dc, FLOAT128);
2712 gen_op_load_fpr_QT0(QFPREG(rs1));
2713 gen_op_load_fpr_QT1(QFPREG(rs2));
2714 gen_op_fcmpeq(rd & 3);
2715 break;
2716 default:
2717 goto illegal_insn;
2719 } else if (xop == 0x2) {
2720 // clr/mov shortcut
2722 rs1 = GET_FIELD(insn, 13, 17);
2723 if (rs1 == 0) {
2724 // or %g0, x, y -> mov T0, x; mov y, T0
2725 if (IS_IMM) { /* immediate */
2726 TCGv r_const;
2728 simm = GET_FIELDs(insn, 19, 31);
2729 r_const = tcg_const_tl(simm);
2730 gen_movl_TN_reg(rd, r_const);
2731 tcg_temp_free(r_const);
2732 } else { /* register */
2733 rs2 = GET_FIELD(insn, 27, 31);
2734 gen_movl_reg_TN(rs2, cpu_dst);
2735 gen_movl_TN_reg(rd, cpu_dst);
2737 } else {
2738 cpu_src1 = get_src1(insn, cpu_src1);
2739 if (IS_IMM) { /* immediate */
2740 simm = GET_FIELDs(insn, 19, 31);
2741 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2742 gen_movl_TN_reg(rd, cpu_dst);
2743 } else { /* register */
2744 // or x, %g0, y -> mov T1, x; mov y, T1
2745 rs2 = GET_FIELD(insn, 27, 31);
2746 if (rs2 != 0) {
2747 gen_movl_reg_TN(rs2, cpu_src2);
2748 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2749 gen_movl_TN_reg(rd, cpu_dst);
2750 } else
2751 gen_movl_TN_reg(rd, cpu_src1);
2754 #ifdef TARGET_SPARC64
2755 } else if (xop == 0x25) { /* sll, V9 sllx */
2756 cpu_src1 = get_src1(insn, cpu_src1);
2757 if (IS_IMM) { /* immediate */
2758 simm = GET_FIELDs(insn, 20, 31);
2759 if (insn & (1 << 12)) {
2760 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2761 } else {
2762 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2764 } else { /* register */
2765 rs2 = GET_FIELD(insn, 27, 31);
2766 gen_movl_reg_TN(rs2, cpu_src2);
2767 if (insn & (1 << 12)) {
2768 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2769 } else {
2770 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2772 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2774 gen_movl_TN_reg(rd, cpu_dst);
2775 } else if (xop == 0x26) { /* srl, V9 srlx */
2776 cpu_src1 = get_src1(insn, cpu_src1);
2777 if (IS_IMM) { /* immediate */
2778 simm = GET_FIELDs(insn, 20, 31);
2779 if (insn & (1 << 12)) {
2780 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2781 } else {
2782 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2783 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2785 } else { /* register */
2786 rs2 = GET_FIELD(insn, 27, 31);
2787 gen_movl_reg_TN(rs2, cpu_src2);
2788 if (insn & (1 << 12)) {
2789 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2790 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2791 } else {
2792 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2793 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2794 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2797 gen_movl_TN_reg(rd, cpu_dst);
2798 } else if (xop == 0x27) { /* sra, V9 srax */
2799 cpu_src1 = get_src1(insn, cpu_src1);
2800 if (IS_IMM) { /* immediate */
2801 simm = GET_FIELDs(insn, 20, 31);
2802 if (insn & (1 << 12)) {
2803 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2804 } else {
2805 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2806 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2807 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2809 } else { /* register */
2810 rs2 = GET_FIELD(insn, 27, 31);
2811 gen_movl_reg_TN(rs2, cpu_src2);
2812 if (insn & (1 << 12)) {
2813 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2814 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2815 } else {
2816 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2817 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2818 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2819 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2822 gen_movl_TN_reg(rd, cpu_dst);
2823 #endif
2824 } else if (xop < 0x36) {
2825 if (xop < 0x20) {
2826 cpu_src1 = get_src1(insn, cpu_src1);
2827 cpu_src2 = get_src2(insn, cpu_src2);
2828 switch (xop & ~0x10) {
2829 case 0x0: /* add */
2830 if (IS_IMM) {
2831 simm = GET_FIELDs(insn, 19, 31);
2832 if (xop & 0x10) {
2833 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2834 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2835 dc->cc_op = CC_OP_ADD;
2836 } else {
2837 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2839 } else {
2840 if (xop & 0x10) {
2841 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2842 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2843 dc->cc_op = CC_OP_ADD;
2844 } else {
2845 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2848 break;
2849 case 0x1: /* and */
2850 if (IS_IMM) {
2851 simm = GET_FIELDs(insn, 19, 31);
2852 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2853 } else {
2854 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2856 if (xop & 0x10) {
2857 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2858 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2859 dc->cc_op = CC_OP_LOGIC;
2861 break;
2862 case 0x2: /* or */
2863 if (IS_IMM) {
2864 simm = GET_FIELDs(insn, 19, 31);
2865 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2866 } else {
2867 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2869 if (xop & 0x10) {
2870 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2871 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2872 dc->cc_op = CC_OP_LOGIC;
2874 break;
2875 case 0x3: /* xor */
2876 if (IS_IMM) {
2877 simm = GET_FIELDs(insn, 19, 31);
2878 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2879 } else {
2880 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2882 if (xop & 0x10) {
2883 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2884 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2885 dc->cc_op = CC_OP_LOGIC;
2887 break;
2888 case 0x4: /* sub */
2889 if (IS_IMM) {
2890 simm = GET_FIELDs(insn, 19, 31);
2891 if (xop & 0x10) {
2892 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2893 } else {
2894 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2896 } else {
2897 if (xop & 0x10) {
2898 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2899 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2900 dc->cc_op = CC_OP_SUB;
2901 } else {
2902 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2905 break;
2906 case 0x5: /* andn */
2907 if (IS_IMM) {
2908 simm = GET_FIELDs(insn, 19, 31);
2909 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2910 } else {
2911 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2913 if (xop & 0x10) {
2914 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2915 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2916 dc->cc_op = CC_OP_LOGIC;
2918 break;
2919 case 0x6: /* orn */
2920 if (IS_IMM) {
2921 simm = GET_FIELDs(insn, 19, 31);
2922 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2923 } else {
2924 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2926 if (xop & 0x10) {
2927 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2928 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2929 dc->cc_op = CC_OP_LOGIC;
2931 break;
2932 case 0x7: /* xorn */
2933 if (IS_IMM) {
2934 simm = GET_FIELDs(insn, 19, 31);
2935 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2936 } else {
2937 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2938 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2940 if (xop & 0x10) {
2941 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2942 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2943 dc->cc_op = CC_OP_LOGIC;
2945 break;
2946 case 0x8: /* addx, V9 addc */
2947 if (IS_IMM) {
2948 simm = GET_FIELDs(insn, 19, 31);
2949 if (xop & 0x10) {
2950 gen_helper_compute_psr();
2951 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2952 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2953 dc->cc_op = CC_OP_ADDX;
2954 } else {
2955 gen_helper_compute_psr();
2956 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2957 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2958 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2960 } else {
2961 if (xop & 0x10) {
2962 gen_helper_compute_psr();
2963 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2964 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2965 dc->cc_op = CC_OP_ADDX;
2966 } else {
2967 gen_helper_compute_psr();
2968 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2969 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2970 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2973 break;
2974 #ifdef TARGET_SPARC64
2975 case 0x9: /* V9 mulx */
2976 if (IS_IMM) {
2977 simm = GET_FIELDs(insn, 19, 31);
2978 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2979 } else {
2980 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2982 break;
2983 #endif
2984 case 0xa: /* umul */
2985 CHECK_IU_FEATURE(dc, MUL);
2986 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2987 if (xop & 0x10) {
2988 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2989 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2990 dc->cc_op = CC_OP_LOGIC;
2992 break;
2993 case 0xb: /* smul */
2994 CHECK_IU_FEATURE(dc, MUL);
2995 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2996 if (xop & 0x10) {
2997 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2998 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2999 dc->cc_op = CC_OP_LOGIC;
3001 break;
3002 case 0xc: /* subx, V9 subc */
3003 if (IS_IMM) {
3004 simm = GET_FIELDs(insn, 19, 31);
3005 if (xop & 0x10) {
3006 gen_helper_compute_psr();
3007 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3008 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3009 dc->cc_op = CC_OP_SUBX;
3010 } else {
3011 gen_helper_compute_psr();
3012 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3013 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3014 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3016 } else {
3017 if (xop & 0x10) {
3018 gen_helper_compute_psr();
3019 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3020 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3021 dc->cc_op = CC_OP_SUBX;
3022 } else {
3023 gen_helper_compute_psr();
3024 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3025 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3026 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3029 break;
3030 #ifdef TARGET_SPARC64
3031 case 0xd: /* V9 udivx */
3032 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3033 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3034 gen_trap_ifdivzero_tl(cpu_cc_src2);
3035 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3036 break;
3037 #endif
3038 case 0xe: /* udiv */
3039 CHECK_IU_FEATURE(dc, DIV);
3040 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3041 if (xop & 0x10) {
3042 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3043 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3044 dc->cc_op = CC_OP_DIV;
3046 break;
3047 case 0xf: /* sdiv */
3048 CHECK_IU_FEATURE(dc, DIV);
3049 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3050 if (xop & 0x10) {
3051 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3052 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3053 dc->cc_op = CC_OP_DIV;
3055 break;
3056 default:
3057 goto illegal_insn;
3059 gen_movl_TN_reg(rd, cpu_dst);
3060 } else {
3061 cpu_src1 = get_src1(insn, cpu_src1);
3062 cpu_src2 = get_src2(insn, cpu_src2);
3063 switch (xop) {
3064 case 0x20: /* taddcc */
3065 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3066 gen_movl_TN_reg(rd, cpu_dst);
3067 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3068 dc->cc_op = CC_OP_TADD;
3069 break;
3070 case 0x21: /* tsubcc */
3071 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3072 gen_movl_TN_reg(rd, cpu_dst);
3073 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3074 dc->cc_op = CC_OP_TSUB;
3075 break;
3076 case 0x22: /* taddcctv */
3077 save_state(dc, cpu_cond);
3078 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3079 gen_movl_TN_reg(rd, cpu_dst);
3080 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3081 dc->cc_op = CC_OP_TADDTV;
3082 break;
3083 case 0x23: /* tsubcctv */
3084 save_state(dc, cpu_cond);
3085 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3086 gen_movl_TN_reg(rd, cpu_dst);
3087 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3088 dc->cc_op = CC_OP_TSUBTV;
3089 break;
3090 case 0x24: /* mulscc */
3091 gen_helper_compute_psr();
3092 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3093 gen_movl_TN_reg(rd, cpu_dst);
3094 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3095 dc->cc_op = CC_OP_ADD;
3096 break;
3097 #ifndef TARGET_SPARC64
3098 case 0x25: /* sll */
3099 if (IS_IMM) { /* immediate */
3100 simm = GET_FIELDs(insn, 20, 31);
3101 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3102 } else { /* register */
3103 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3104 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3106 gen_movl_TN_reg(rd, cpu_dst);
3107 break;
3108 case 0x26: /* srl */
3109 if (IS_IMM) { /* immediate */
3110 simm = GET_FIELDs(insn, 20, 31);
3111 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3112 } else { /* register */
3113 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3114 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3116 gen_movl_TN_reg(rd, cpu_dst);
3117 break;
3118 case 0x27: /* sra */
3119 if (IS_IMM) { /* immediate */
3120 simm = GET_FIELDs(insn, 20, 31);
3121 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3122 } else { /* register */
3123 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3124 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3126 gen_movl_TN_reg(rd, cpu_dst);
3127 break;
3128 #endif
3129 case 0x30:
3131 switch(rd) {
3132 case 0: /* wry */
3133 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3134 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3135 break;
3136 #ifndef TARGET_SPARC64
3137 case 0x01 ... 0x0f: /* undefined in the
3138 SPARCv8 manual, nop
3139 on the microSPARC
3140 II */
3141 case 0x10 ... 0x1f: /* implementation-dependent
3142 in the SPARCv8
3143 manual, nop on the
3144 microSPARC II */
3145 break;
3146 #else
3147 case 0x2: /* V9 wrccr */
3148 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3149 gen_helper_wrccr(cpu_dst);
3150 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3151 dc->cc_op = CC_OP_FLAGS;
3152 break;
3153 case 0x3: /* V9 wrasi */
3154 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3155 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3156 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3157 break;
3158 case 0x6: /* V9 wrfprs */
3159 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3160 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3161 save_state(dc, cpu_cond);
3162 gen_op_next_insn();
3163 tcg_gen_exit_tb(0);
3164 dc->is_br = 1;
3165 break;
3166 case 0xf: /* V9 sir, nop if user */
3167 #if !defined(CONFIG_USER_ONLY)
3168 if (supervisor(dc))
3169 ; // XXX
3170 #endif
3171 break;
3172 case 0x13: /* Graphics Status */
3173 if (gen_trap_ifnofpu(dc, cpu_cond))
3174 goto jmp_insn;
3175 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3176 break;
3177 case 0x14: /* Softint set */
3178 if (!supervisor(dc))
3179 goto illegal_insn;
3180 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3181 gen_helper_set_softint(cpu_tmp64);
3182 break;
3183 case 0x15: /* Softint clear */
3184 if (!supervisor(dc))
3185 goto illegal_insn;
3186 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3187 gen_helper_clear_softint(cpu_tmp64);
3188 break;
3189 case 0x16: /* Softint write */
3190 if (!supervisor(dc))
3191 goto illegal_insn;
3192 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3193 gen_helper_write_softint(cpu_tmp64);
3194 break;
3195 case 0x17: /* Tick compare */
3196 #if !defined(CONFIG_USER_ONLY)
3197 if (!supervisor(dc))
3198 goto illegal_insn;
3199 #endif
3201 TCGv_ptr r_tickptr;
3203 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3204 cpu_src2);
3205 r_tickptr = tcg_temp_new_ptr();
3206 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3207 offsetof(CPUState, tick));
3208 gen_helper_tick_set_limit(r_tickptr,
3209 cpu_tick_cmpr);
3210 tcg_temp_free_ptr(r_tickptr);
3212 break;
3213 case 0x18: /* System tick */
3214 #if !defined(CONFIG_USER_ONLY)
3215 if (!supervisor(dc))
3216 goto illegal_insn;
3217 #endif
3219 TCGv_ptr r_tickptr;
3221 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3222 cpu_src2);
3223 r_tickptr = tcg_temp_new_ptr();
3224 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3225 offsetof(CPUState, stick));
3226 gen_helper_tick_set_count(r_tickptr,
3227 cpu_dst);
3228 tcg_temp_free_ptr(r_tickptr);
3230 break;
3231 case 0x19: /* System tick compare */
3232 #if !defined(CONFIG_USER_ONLY)
3233 if (!supervisor(dc))
3234 goto illegal_insn;
3235 #endif
3237 TCGv_ptr r_tickptr;
3239 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3240 cpu_src2);
3241 r_tickptr = tcg_temp_new_ptr();
3242 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3243 offsetof(CPUState, stick));
3244 gen_helper_tick_set_limit(r_tickptr,
3245 cpu_stick_cmpr);
3246 tcg_temp_free_ptr(r_tickptr);
3248 break;
3250 case 0x10: /* Performance Control */
3251 case 0x11: /* Performance Instrumentation
3252 Counter */
3253 case 0x12: /* Dispatch Control */
3254 #endif
3255 default:
3256 goto illegal_insn;
3259 break;
3260 #if !defined(CONFIG_USER_ONLY)
3261 case 0x31: /* wrpsr, V9 saved, restored */
3263 if (!supervisor(dc))
3264 goto priv_insn;
3265 #ifdef TARGET_SPARC64
3266 switch (rd) {
3267 case 0:
3268 gen_helper_saved();
3269 break;
3270 case 1:
3271 gen_helper_restored();
3272 break;
3273 case 2: /* UA2005 allclean */
3274 case 3: /* UA2005 otherw */
3275 case 4: /* UA2005 normalw */
3276 case 5: /* UA2005 invalw */
3277 // XXX
3278 default:
3279 goto illegal_insn;
3281 #else
3282 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3283 gen_helper_wrpsr(cpu_dst);
3284 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3285 dc->cc_op = CC_OP_FLAGS;
3286 save_state(dc, cpu_cond);
3287 gen_op_next_insn();
3288 tcg_gen_exit_tb(0);
3289 dc->is_br = 1;
3290 #endif
3292 break;
3293 case 0x32: /* wrwim, V9 wrpr */
3295 if (!supervisor(dc))
3296 goto priv_insn;
3297 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3298 #ifdef TARGET_SPARC64
3299 switch (rd) {
3300 case 0: // tpc
3302 TCGv_ptr r_tsptr;
3304 r_tsptr = tcg_temp_new_ptr();
3305 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3306 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3307 offsetof(trap_state, tpc));
3308 tcg_temp_free_ptr(r_tsptr);
3310 break;
3311 case 1: // tnpc
3313 TCGv_ptr r_tsptr;
3315 r_tsptr = tcg_temp_new_ptr();
3316 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3317 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3318 offsetof(trap_state, tnpc));
3319 tcg_temp_free_ptr(r_tsptr);
3321 break;
3322 case 2: // tstate
3324 TCGv_ptr r_tsptr;
3326 r_tsptr = tcg_temp_new_ptr();
3327 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3328 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3329 offsetof(trap_state,
3330 tstate));
3331 tcg_temp_free_ptr(r_tsptr);
3333 break;
3334 case 3: // tt
3336 TCGv_ptr r_tsptr;
3338 r_tsptr = tcg_temp_new_ptr();
3339 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3340 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3341 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3342 offsetof(trap_state, tt));
3343 tcg_temp_free_ptr(r_tsptr);
3345 break;
3346 case 4: // tick
3348 TCGv_ptr r_tickptr;
3350 r_tickptr = tcg_temp_new_ptr();
3351 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3352 offsetof(CPUState, tick));
3353 gen_helper_tick_set_count(r_tickptr,
3354 cpu_tmp0);
3355 tcg_temp_free_ptr(r_tickptr);
3357 break;
3358 case 5: // tba
3359 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3360 break;
3361 case 6: // pstate
3362 save_state(dc, cpu_cond);
3363 gen_helper_wrpstate(cpu_tmp0);
3364 gen_op_next_insn();
3365 tcg_gen_exit_tb(0);
3366 dc->is_br = 1;
3367 break;
3368 case 7: // tl
3369 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3370 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3371 offsetof(CPUSPARCState, tl));
3372 break;
3373 case 8: // pil
3374 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3375 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3376 offsetof(CPUSPARCState,
3377 psrpil));
3378 break;
3379 case 9: // cwp
3380 gen_helper_wrcwp(cpu_tmp0);
3381 break;
3382 case 10: // cansave
3383 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3384 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3385 offsetof(CPUSPARCState,
3386 cansave));
3387 break;
3388 case 11: // canrestore
3389 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3390 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3391 offsetof(CPUSPARCState,
3392 canrestore));
3393 break;
3394 case 12: // cleanwin
3395 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3396 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3397 offsetof(CPUSPARCState,
3398 cleanwin));
3399 break;
3400 case 13: // otherwin
3401 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3402 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3403 offsetof(CPUSPARCState,
3404 otherwin));
3405 break;
3406 case 14: // wstate
3407 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3408 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3409 offsetof(CPUSPARCState,
3410 wstate));
3411 break;
3412 case 16: // UA2005 gl
3413 CHECK_IU_FEATURE(dc, GL);
3414 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3415 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3416 offsetof(CPUSPARCState, gl));
3417 break;
3418 case 26: // UA2005 strand status
3419 CHECK_IU_FEATURE(dc, HYPV);
3420 if (!hypervisor(dc))
3421 goto priv_insn;
3422 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3423 break;
3424 default:
3425 goto illegal_insn;
3427 #else
3428 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3429 if (dc->def->nwindows != 32)
3430 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3431 (1 << dc->def->nwindows) - 1);
3432 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3433 #endif
3435 break;
3436 case 0x33: /* wrtbr, UA2005 wrhpr */
3438 #ifndef TARGET_SPARC64
3439 if (!supervisor(dc))
3440 goto priv_insn;
3441 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3442 #else
3443 CHECK_IU_FEATURE(dc, HYPV);
3444 if (!hypervisor(dc))
3445 goto priv_insn;
3446 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3447 switch (rd) {
3448 case 0: // hpstate
3449 // XXX gen_op_wrhpstate();
3450 save_state(dc, cpu_cond);
3451 gen_op_next_insn();
3452 tcg_gen_exit_tb(0);
3453 dc->is_br = 1;
3454 break;
3455 case 1: // htstate
3456 // XXX gen_op_wrhtstate();
3457 break;
3458 case 3: // hintp
3459 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3460 break;
3461 case 5: // htba
3462 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3463 break;
3464 case 31: // hstick_cmpr
3466 TCGv_ptr r_tickptr;
3468 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3469 r_tickptr = tcg_temp_new_ptr();
3470 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3471 offsetof(CPUState, hstick));
3472 gen_helper_tick_set_limit(r_tickptr,
3473 cpu_hstick_cmpr);
3474 tcg_temp_free_ptr(r_tickptr);
3476 break;
3477 case 6: // hver readonly
3478 default:
3479 goto illegal_insn;
3481 #endif
3483 break;
3484 #endif
3485 #ifdef TARGET_SPARC64
3486 case 0x2c: /* V9 movcc */
3488 int cc = GET_FIELD_SP(insn, 11, 12);
3489 int cond = GET_FIELD_SP(insn, 14, 17);
3490 TCGv r_cond;
3491 int l1;
3493 r_cond = tcg_temp_new();
3494 if (insn & (1 << 18)) {
3495 if (cc == 0)
3496 gen_cond(r_cond, 0, cond, dc);
3497 else if (cc == 2)
3498 gen_cond(r_cond, 1, cond, dc);
3499 else
3500 goto illegal_insn;
3501 } else {
3502 gen_fcond(r_cond, cc, cond);
3505 l1 = gen_new_label();
3507 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3508 if (IS_IMM) { /* immediate */
3509 TCGv r_const;
3511 simm = GET_FIELD_SPs(insn, 0, 10);
3512 r_const = tcg_const_tl(simm);
3513 gen_movl_TN_reg(rd, r_const);
3514 tcg_temp_free(r_const);
3515 } else {
3516 rs2 = GET_FIELD_SP(insn, 0, 4);
3517 gen_movl_reg_TN(rs2, cpu_tmp0);
3518 gen_movl_TN_reg(rd, cpu_tmp0);
3520 gen_set_label(l1);
3521 tcg_temp_free(r_cond);
3522 break;
3524 case 0x2d: /* V9 sdivx */
3525 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3526 gen_movl_TN_reg(rd, cpu_dst);
3527 break;
3528 case 0x2e: /* V9 popc */
3530 cpu_src2 = get_src2(insn, cpu_src2);
3531 gen_helper_popc(cpu_dst, cpu_src2);
3532 gen_movl_TN_reg(rd, cpu_dst);
3534 case 0x2f: /* V9 movr */
3536 int cond = GET_FIELD_SP(insn, 10, 12);
3537 int l1;
3539 cpu_src1 = get_src1(insn, cpu_src1);
3541 l1 = gen_new_label();
3543 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3544 cpu_src1, 0, l1);
3545 if (IS_IMM) { /* immediate */
3546 TCGv r_const;
3548 simm = GET_FIELD_SPs(insn, 0, 9);
3549 r_const = tcg_const_tl(simm);
3550 gen_movl_TN_reg(rd, r_const);
3551 tcg_temp_free(r_const);
3552 } else {
3553 rs2 = GET_FIELD_SP(insn, 0, 4);
3554 gen_movl_reg_TN(rs2, cpu_tmp0);
3555 gen_movl_TN_reg(rd, cpu_tmp0);
3557 gen_set_label(l1);
3558 break;
3560 #endif
3561 default:
3562 goto illegal_insn;
3565 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3566 #ifdef TARGET_SPARC64
3567 int opf = GET_FIELD_SP(insn, 5, 13);
3568 rs1 = GET_FIELD(insn, 13, 17);
3569 rs2 = GET_FIELD(insn, 27, 31);
3570 if (gen_trap_ifnofpu(dc, cpu_cond))
3571 goto jmp_insn;
3573 switch (opf) {
3574 case 0x000: /* VIS I edge8cc */
3575 case 0x001: /* VIS II edge8n */
3576 case 0x002: /* VIS I edge8lcc */
3577 case 0x003: /* VIS II edge8ln */
3578 case 0x004: /* VIS I edge16cc */
3579 case 0x005: /* VIS II edge16n */
3580 case 0x006: /* VIS I edge16lcc */
3581 case 0x007: /* VIS II edge16ln */
3582 case 0x008: /* VIS I edge32cc */
3583 case 0x009: /* VIS II edge32n */
3584 case 0x00a: /* VIS I edge32lcc */
3585 case 0x00b: /* VIS II edge32ln */
3586 // XXX
3587 goto illegal_insn;
3588 case 0x010: /* VIS I array8 */
3589 CHECK_FPU_FEATURE(dc, VIS1);
3590 cpu_src1 = get_src1(insn, cpu_src1);
3591 gen_movl_reg_TN(rs2, cpu_src2);
3592 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3593 gen_movl_TN_reg(rd, cpu_dst);
3594 break;
3595 case 0x012: /* VIS I array16 */
3596 CHECK_FPU_FEATURE(dc, VIS1);
3597 cpu_src1 = get_src1(insn, cpu_src1);
3598 gen_movl_reg_TN(rs2, cpu_src2);
3599 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3600 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3601 gen_movl_TN_reg(rd, cpu_dst);
3602 break;
3603 case 0x014: /* VIS I array32 */
3604 CHECK_FPU_FEATURE(dc, VIS1);
3605 cpu_src1 = get_src1(insn, cpu_src1);
3606 gen_movl_reg_TN(rs2, cpu_src2);
3607 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3608 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3609 gen_movl_TN_reg(rd, cpu_dst);
3610 break;
3611 case 0x018: /* VIS I alignaddr */
3612 CHECK_FPU_FEATURE(dc, VIS1);
3613 cpu_src1 = get_src1(insn, cpu_src1);
3614 gen_movl_reg_TN(rs2, cpu_src2);
3615 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3616 gen_movl_TN_reg(rd, cpu_dst);
3617 break;
3618 case 0x019: /* VIS II bmask */
3619 case 0x01a: /* VIS I alignaddrl */
3620 // XXX
3621 goto illegal_insn;
3622 case 0x020: /* VIS I fcmple16 */
3623 CHECK_FPU_FEATURE(dc, VIS1);
3624 gen_op_load_fpr_DT0(DFPREG(rs1));
3625 gen_op_load_fpr_DT1(DFPREG(rs2));
3626 gen_helper_fcmple16();
3627 gen_op_store_DT0_fpr(DFPREG(rd));
3628 break;
3629 case 0x022: /* VIS I fcmpne16 */
3630 CHECK_FPU_FEATURE(dc, VIS1);
3631 gen_op_load_fpr_DT0(DFPREG(rs1));
3632 gen_op_load_fpr_DT1(DFPREG(rs2));
3633 gen_helper_fcmpne16();
3634 gen_op_store_DT0_fpr(DFPREG(rd));
3635 break;
3636 case 0x024: /* VIS I fcmple32 */
3637 CHECK_FPU_FEATURE(dc, VIS1);
3638 gen_op_load_fpr_DT0(DFPREG(rs1));
3639 gen_op_load_fpr_DT1(DFPREG(rs2));
3640 gen_helper_fcmple32();
3641 gen_op_store_DT0_fpr(DFPREG(rd));
3642 break;
3643 case 0x026: /* VIS I fcmpne32 */
3644 CHECK_FPU_FEATURE(dc, VIS1);
3645 gen_op_load_fpr_DT0(DFPREG(rs1));
3646 gen_op_load_fpr_DT1(DFPREG(rs2));
3647 gen_helper_fcmpne32();
3648 gen_op_store_DT0_fpr(DFPREG(rd));
3649 break;
3650 case 0x028: /* VIS I fcmpgt16 */
3651 CHECK_FPU_FEATURE(dc, VIS1);
3652 gen_op_load_fpr_DT0(DFPREG(rs1));
3653 gen_op_load_fpr_DT1(DFPREG(rs2));
3654 gen_helper_fcmpgt16();
3655 gen_op_store_DT0_fpr(DFPREG(rd));
3656 break;
3657 case 0x02a: /* VIS I fcmpeq16 */
3658 CHECK_FPU_FEATURE(dc, VIS1);
3659 gen_op_load_fpr_DT0(DFPREG(rs1));
3660 gen_op_load_fpr_DT1(DFPREG(rs2));
3661 gen_helper_fcmpeq16();
3662 gen_op_store_DT0_fpr(DFPREG(rd));
3663 break;
3664 case 0x02c: /* VIS I fcmpgt32 */
3665 CHECK_FPU_FEATURE(dc, VIS1);
3666 gen_op_load_fpr_DT0(DFPREG(rs1));
3667 gen_op_load_fpr_DT1(DFPREG(rs2));
3668 gen_helper_fcmpgt32();
3669 gen_op_store_DT0_fpr(DFPREG(rd));
3670 break;
3671 case 0x02e: /* VIS I fcmpeq32 */
3672 CHECK_FPU_FEATURE(dc, VIS1);
3673 gen_op_load_fpr_DT0(DFPREG(rs1));
3674 gen_op_load_fpr_DT1(DFPREG(rs2));
3675 gen_helper_fcmpeq32();
3676 gen_op_store_DT0_fpr(DFPREG(rd));
3677 break;
3678 case 0x031: /* VIS I fmul8x16 */
3679 CHECK_FPU_FEATURE(dc, VIS1);
3680 gen_op_load_fpr_DT0(DFPREG(rs1));
3681 gen_op_load_fpr_DT1(DFPREG(rs2));
3682 gen_helper_fmul8x16();
3683 gen_op_store_DT0_fpr(DFPREG(rd));
3684 break;
3685 case 0x033: /* VIS I fmul8x16au */
3686 CHECK_FPU_FEATURE(dc, VIS1);
3687 gen_op_load_fpr_DT0(DFPREG(rs1));
3688 gen_op_load_fpr_DT1(DFPREG(rs2));
3689 gen_helper_fmul8x16au();
3690 gen_op_store_DT0_fpr(DFPREG(rd));
3691 break;
3692 case 0x035: /* VIS I fmul8x16al */
3693 CHECK_FPU_FEATURE(dc, VIS1);
3694 gen_op_load_fpr_DT0(DFPREG(rs1));
3695 gen_op_load_fpr_DT1(DFPREG(rs2));
3696 gen_helper_fmul8x16al();
3697 gen_op_store_DT0_fpr(DFPREG(rd));
3698 break;
3699 case 0x036: /* VIS I fmul8sux16 */
3700 CHECK_FPU_FEATURE(dc, VIS1);
3701 gen_op_load_fpr_DT0(DFPREG(rs1));
3702 gen_op_load_fpr_DT1(DFPREG(rs2));
3703 gen_helper_fmul8sux16();
3704 gen_op_store_DT0_fpr(DFPREG(rd));
3705 break;
3706 case 0x037: /* VIS I fmul8ulx16 */
3707 CHECK_FPU_FEATURE(dc, VIS1);
3708 gen_op_load_fpr_DT0(DFPREG(rs1));
3709 gen_op_load_fpr_DT1(DFPREG(rs2));
3710 gen_helper_fmul8ulx16();
3711 gen_op_store_DT0_fpr(DFPREG(rd));
3712 break;
3713 case 0x038: /* VIS I fmuld8sux16 */
3714 CHECK_FPU_FEATURE(dc, VIS1);
3715 gen_op_load_fpr_DT0(DFPREG(rs1));
3716 gen_op_load_fpr_DT1(DFPREG(rs2));
3717 gen_helper_fmuld8sux16();
3718 gen_op_store_DT0_fpr(DFPREG(rd));
3719 break;
3720 case 0x039: /* VIS I fmuld8ulx16 */
3721 CHECK_FPU_FEATURE(dc, VIS1);
3722 gen_op_load_fpr_DT0(DFPREG(rs1));
3723 gen_op_load_fpr_DT1(DFPREG(rs2));
3724 gen_helper_fmuld8ulx16();
3725 gen_op_store_DT0_fpr(DFPREG(rd));
3726 break;
3727 case 0x03a: /* VIS I fpack32 */
3728 case 0x03b: /* VIS I fpack16 */
3729 case 0x03d: /* VIS I fpackfix */
3730 case 0x03e: /* VIS I pdist */
3731 // XXX
3732 goto illegal_insn;
3733 case 0x048: /* VIS I faligndata */
3734 CHECK_FPU_FEATURE(dc, VIS1);
3735 gen_op_load_fpr_DT0(DFPREG(rs1));
3736 gen_op_load_fpr_DT1(DFPREG(rs2));
3737 gen_helper_faligndata();
3738 gen_op_store_DT0_fpr(DFPREG(rd));
3739 break;
3740 case 0x04b: /* VIS I fpmerge */
3741 CHECK_FPU_FEATURE(dc, VIS1);
3742 gen_op_load_fpr_DT0(DFPREG(rs1));
3743 gen_op_load_fpr_DT1(DFPREG(rs2));
3744 gen_helper_fpmerge();
3745 gen_op_store_DT0_fpr(DFPREG(rd));
3746 break;
3747 case 0x04c: /* VIS II bshuffle */
3748 // XXX
3749 goto illegal_insn;
3750 case 0x04d: /* VIS I fexpand */
3751 CHECK_FPU_FEATURE(dc, VIS1);
3752 gen_op_load_fpr_DT0(DFPREG(rs1));
3753 gen_op_load_fpr_DT1(DFPREG(rs2));
3754 gen_helper_fexpand();
3755 gen_op_store_DT0_fpr(DFPREG(rd));
3756 break;
3757 case 0x050: /* VIS I fpadd16 */
3758 CHECK_FPU_FEATURE(dc, VIS1);
3759 gen_op_load_fpr_DT0(DFPREG(rs1));
3760 gen_op_load_fpr_DT1(DFPREG(rs2));
3761 gen_helper_fpadd16();
3762 gen_op_store_DT0_fpr(DFPREG(rd));
3763 break;
3764 case 0x051: /* VIS I fpadd16s */
3765 CHECK_FPU_FEATURE(dc, VIS1);
3766 gen_helper_fpadd16s(cpu_fpr[rd],
3767 cpu_fpr[rs1], cpu_fpr[rs2]);
3768 break;
3769 case 0x052: /* VIS I fpadd32 */
3770 CHECK_FPU_FEATURE(dc, VIS1);
3771 gen_op_load_fpr_DT0(DFPREG(rs1));
3772 gen_op_load_fpr_DT1(DFPREG(rs2));
3773 gen_helper_fpadd32();
3774 gen_op_store_DT0_fpr(DFPREG(rd));
3775 break;
3776 case 0x053: /* VIS I fpadd32s */
3777 CHECK_FPU_FEATURE(dc, VIS1);
3778 gen_helper_fpadd32s(cpu_fpr[rd],
3779 cpu_fpr[rs1], cpu_fpr[rs2]);
3780 break;
3781 case 0x054: /* VIS I fpsub16 */
3782 CHECK_FPU_FEATURE(dc, VIS1);
3783 gen_op_load_fpr_DT0(DFPREG(rs1));
3784 gen_op_load_fpr_DT1(DFPREG(rs2));
3785 gen_helper_fpsub16();
3786 gen_op_store_DT0_fpr(DFPREG(rd));
3787 break;
3788 case 0x055: /* VIS I fpsub16s */
3789 CHECK_FPU_FEATURE(dc, VIS1);
3790 gen_helper_fpsub16s(cpu_fpr[rd],
3791 cpu_fpr[rs1], cpu_fpr[rs2]);
3792 break;
3793 case 0x056: /* VIS I fpsub32 */
3794 CHECK_FPU_FEATURE(dc, VIS1);
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 gen_helper_fpsub32();
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3799 break;
3800 case 0x057: /* VIS I fpsub32s */
3801 CHECK_FPU_FEATURE(dc, VIS1);
3802 gen_helper_fpsub32s(cpu_fpr[rd],
3803 cpu_fpr[rs1], cpu_fpr[rs2]);
3804 break;
3805 case 0x060: /* VIS I fzero */
3806 CHECK_FPU_FEATURE(dc, VIS1);
3807 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3808 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3809 break;
3810 case 0x061: /* VIS I fzeros */
3811 CHECK_FPU_FEATURE(dc, VIS1);
3812 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3813 break;
3814 case 0x062: /* VIS I fnor */
3815 CHECK_FPU_FEATURE(dc, VIS1);
3816 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3817 cpu_fpr[DFPREG(rs2)]);
3818 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3819 cpu_fpr[DFPREG(rs2) + 1]);
3820 break;
3821 case 0x063: /* VIS I fnors */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3824 break;
3825 case 0x064: /* VIS I fandnot2 */
3826 CHECK_FPU_FEATURE(dc, VIS1);
3827 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3828 cpu_fpr[DFPREG(rs2)]);
3829 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3830 cpu_fpr[DFPREG(rs1) + 1],
3831 cpu_fpr[DFPREG(rs2) + 1]);
3832 break;
3833 case 0x065: /* VIS I fandnot2s */
3834 CHECK_FPU_FEATURE(dc, VIS1);
3835 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3836 break;
3837 case 0x066: /* VIS I fnot2 */
3838 CHECK_FPU_FEATURE(dc, VIS1);
3839 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3840 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3841 cpu_fpr[DFPREG(rs2) + 1]);
3842 break;
3843 case 0x067: /* VIS I fnot2s */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3846 break;
3847 case 0x068: /* VIS I fandnot1 */
3848 CHECK_FPU_FEATURE(dc, VIS1);
3849 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3850 cpu_fpr[DFPREG(rs1)]);
3851 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3852 cpu_fpr[DFPREG(rs2) + 1],
3853 cpu_fpr[DFPREG(rs1) + 1]);
3854 break;
3855 case 0x069: /* VIS I fandnot1s */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3858 break;
3859 case 0x06a: /* VIS I fnot1 */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3862 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3863 cpu_fpr[DFPREG(rs1) + 1]);
3864 break;
3865 case 0x06b: /* VIS I fnot1s */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3868 break;
3869 case 0x06c: /* VIS I fxor */
3870 CHECK_FPU_FEATURE(dc, VIS1);
3871 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3872 cpu_fpr[DFPREG(rs2)]);
3873 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3874 cpu_fpr[DFPREG(rs1) + 1],
3875 cpu_fpr[DFPREG(rs2) + 1]);
3876 break;
3877 case 0x06d: /* VIS I fxors */
3878 CHECK_FPU_FEATURE(dc, VIS1);
3879 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3880 break;
3881 case 0x06e: /* VIS I fnand */
3882 CHECK_FPU_FEATURE(dc, VIS1);
3883 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3884 cpu_fpr[DFPREG(rs2)]);
3885 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3886 cpu_fpr[DFPREG(rs2) + 1]);
3887 break;
3888 case 0x06f: /* VIS I fnands */
3889 CHECK_FPU_FEATURE(dc, VIS1);
3890 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3891 break;
3892 case 0x070: /* VIS I fand */
3893 CHECK_FPU_FEATURE(dc, VIS1);
3894 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3895 cpu_fpr[DFPREG(rs2)]);
3896 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3897 cpu_fpr[DFPREG(rs1) + 1],
3898 cpu_fpr[DFPREG(rs2) + 1]);
3899 break;
3900 case 0x071: /* VIS I fands */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3903 break;
3904 case 0x072: /* VIS I fxnor */
3905 CHECK_FPU_FEATURE(dc, VIS1);
3906 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3907 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3908 cpu_fpr[DFPREG(rs1)]);
3909 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3910 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3911 cpu_fpr[DFPREG(rs1) + 1]);
3912 break;
3913 case 0x073: /* VIS I fxnors */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3916 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3917 break;
3918 case 0x074: /* VIS I fsrc1 */
3919 CHECK_FPU_FEATURE(dc, VIS1);
3920 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3921 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3922 cpu_fpr[DFPREG(rs1) + 1]);
3923 break;
3924 case 0x075: /* VIS I fsrc1s */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3927 break;
3928 case 0x076: /* VIS I fornot2 */
3929 CHECK_FPU_FEATURE(dc, VIS1);
3930 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3931 cpu_fpr[DFPREG(rs2)]);
3932 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3933 cpu_fpr[DFPREG(rs1) + 1],
3934 cpu_fpr[DFPREG(rs2) + 1]);
3935 break;
3936 case 0x077: /* VIS I fornot2s */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3939 break;
3940 case 0x078: /* VIS I fsrc2 */
3941 CHECK_FPU_FEATURE(dc, VIS1);
3942 gen_op_load_fpr_DT0(DFPREG(rs2));
3943 gen_op_store_DT0_fpr(DFPREG(rd));
3944 break;
3945 case 0x079: /* VIS I fsrc2s */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3948 break;
3949 case 0x07a: /* VIS I fornot1 */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3952 cpu_fpr[DFPREG(rs1)]);
3953 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3954 cpu_fpr[DFPREG(rs2) + 1],
3955 cpu_fpr[DFPREG(rs1) + 1]);
3956 break;
3957 case 0x07b: /* VIS I fornot1s */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3960 break;
3961 case 0x07c: /* VIS I for */
3962 CHECK_FPU_FEATURE(dc, VIS1);
3963 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3964 cpu_fpr[DFPREG(rs2)]);
3965 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3966 cpu_fpr[DFPREG(rs1) + 1],
3967 cpu_fpr[DFPREG(rs2) + 1]);
3968 break;
3969 case 0x07d: /* VIS I fors */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3972 break;
3973 case 0x07e: /* VIS I fone */
3974 CHECK_FPU_FEATURE(dc, VIS1);
3975 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3976 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3977 break;
3978 case 0x07f: /* VIS I fones */
3979 CHECK_FPU_FEATURE(dc, VIS1);
3980 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3981 break;
3982 case 0x080: /* VIS I shutdown */
3983 case 0x081: /* VIS II siam */
3984 // XXX
3985 goto illegal_insn;
3986 default:
3987 goto illegal_insn;
3989 #else
3990 goto ncp_insn;
3991 #endif
3992 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3993 #ifdef TARGET_SPARC64
3994 goto illegal_insn;
3995 #else
3996 goto ncp_insn;
3997 #endif
3998 #ifdef TARGET_SPARC64
3999 } else if (xop == 0x39) { /* V9 return */
4000 TCGv_i32 r_const;
4002 save_state(dc, cpu_cond);
4003 cpu_src1 = get_src1(insn, cpu_src1);
4004 if (IS_IMM) { /* immediate */
4005 simm = GET_FIELDs(insn, 19, 31);
4006 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4007 } else { /* register */
4008 rs2 = GET_FIELD(insn, 27, 31);
4009 if (rs2) {
4010 gen_movl_reg_TN(rs2, cpu_src2);
4011 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4012 } else
4013 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4015 gen_helper_restore();
4016 gen_mov_pc_npc(dc, cpu_cond);
4017 r_const = tcg_const_i32(3);
4018 gen_helper_check_align(cpu_dst, r_const);
4019 tcg_temp_free_i32(r_const);
4020 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4021 dc->npc = DYNAMIC_PC;
4022 goto jmp_insn;
4023 #endif
4024 } else {
4025 cpu_src1 = get_src1(insn, cpu_src1);
4026 if (IS_IMM) { /* immediate */
4027 simm = GET_FIELDs(insn, 19, 31);
4028 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4029 } else { /* register */
4030 rs2 = GET_FIELD(insn, 27, 31);
4031 if (rs2) {
4032 gen_movl_reg_TN(rs2, cpu_src2);
4033 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4034 } else
4035 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4037 switch (xop) {
4038 case 0x38: /* jmpl */
4040 TCGv r_pc;
4041 TCGv_i32 r_const;
4043 r_pc = tcg_const_tl(dc->pc);
4044 gen_movl_TN_reg(rd, r_pc);
4045 tcg_temp_free(r_pc);
4046 gen_mov_pc_npc(dc, cpu_cond);
4047 r_const = tcg_const_i32(3);
4048 gen_helper_check_align(cpu_dst, r_const);
4049 tcg_temp_free_i32(r_const);
4050 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4051 dc->npc = DYNAMIC_PC;
4053 goto jmp_insn;
4054 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4055 case 0x39: /* rett, V9 return */
4057 TCGv_i32 r_const;
4059 if (!supervisor(dc))
4060 goto priv_insn;
4061 gen_mov_pc_npc(dc, cpu_cond);
4062 r_const = tcg_const_i32(3);
4063 gen_helper_check_align(cpu_dst, r_const);
4064 tcg_temp_free_i32(r_const);
4065 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4066 dc->npc = DYNAMIC_PC;
4067 gen_helper_rett();
4069 goto jmp_insn;
4070 #endif
4071 case 0x3b: /* flush */
4072 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4073 goto unimp_flush;
4074 gen_helper_flush(cpu_dst);
4075 break;
4076 case 0x3c: /* save */
4077 save_state(dc, cpu_cond);
4078 gen_helper_save();
4079 gen_movl_TN_reg(rd, cpu_dst);
4080 break;
4081 case 0x3d: /* restore */
4082 save_state(dc, cpu_cond);
4083 gen_helper_restore();
4084 gen_movl_TN_reg(rd, cpu_dst);
4085 break;
4086 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4087 case 0x3e: /* V9 done/retry */
4089 switch (rd) {
4090 case 0:
4091 if (!supervisor(dc))
4092 goto priv_insn;
4093 dc->npc = DYNAMIC_PC;
4094 dc->pc = DYNAMIC_PC;
4095 gen_helper_done();
4096 goto jmp_insn;
4097 case 1:
4098 if (!supervisor(dc))
4099 goto priv_insn;
4100 dc->npc = DYNAMIC_PC;
4101 dc->pc = DYNAMIC_PC;
4102 gen_helper_retry();
4103 goto jmp_insn;
4104 default:
4105 goto illegal_insn;
4108 break;
4109 #endif
4110 default:
4111 goto illegal_insn;
4114 break;
4116 break;
4117 case 3: /* load/store instructions */
4119 unsigned int xop = GET_FIELD(insn, 7, 12);
4121 /* flush pending conditional evaluations before exposing
4122 cpu state */
4123 if (dc->cc_op != CC_OP_FLAGS) {
4124 dc->cc_op = CC_OP_FLAGS;
4125 gen_helper_compute_psr();
4127 cpu_src1 = get_src1(insn, cpu_src1);
4128 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4129 rs2 = GET_FIELD(insn, 27, 31);
4130 gen_movl_reg_TN(rs2, cpu_src2);
4131 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4132 } else if (IS_IMM) { /* immediate */
4133 simm = GET_FIELDs(insn, 19, 31);
4134 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4135 } else { /* register */
4136 rs2 = GET_FIELD(insn, 27, 31);
4137 if (rs2 != 0) {
4138 gen_movl_reg_TN(rs2, cpu_src2);
4139 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4140 } else
4141 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4143 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4144 (xop > 0x17 && xop <= 0x1d ) ||
4145 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4146 switch (xop) {
4147 case 0x0: /* ld, V9 lduw, load unsigned word */
4148 gen_address_mask(dc, cpu_addr);
4149 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4150 break;
4151 case 0x1: /* ldub, load unsigned byte */
4152 gen_address_mask(dc, cpu_addr);
4153 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4154 break;
4155 case 0x2: /* lduh, load unsigned halfword */
4156 gen_address_mask(dc, cpu_addr);
4157 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4158 break;
4159 case 0x3: /* ldd, load double word */
4160 if (rd & 1)
4161 goto illegal_insn;
4162 else {
4163 TCGv_i32 r_const;
4165 save_state(dc, cpu_cond);
4166 r_const = tcg_const_i32(7);
4167 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4168 tcg_temp_free_i32(r_const);
4169 gen_address_mask(dc, cpu_addr);
4170 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4171 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4172 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4173 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4174 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4175 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4176 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4178 break;
4179 case 0x9: /* ldsb, load signed byte */
4180 gen_address_mask(dc, cpu_addr);
4181 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4182 break;
4183 case 0xa: /* ldsh, load signed halfword */
4184 gen_address_mask(dc, cpu_addr);
4185 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4186 break;
4187 case 0xd: /* ldstub -- XXX: should be atomically */
4189 TCGv r_const;
4191 gen_address_mask(dc, cpu_addr);
4192 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4193 r_const = tcg_const_tl(0xff);
4194 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4195 tcg_temp_free(r_const);
4197 break;
4198 case 0x0f: /* swap, swap register with memory. Also
4199 atomically */
4200 CHECK_IU_FEATURE(dc, SWAP);
4201 gen_movl_reg_TN(rd, cpu_val);
4202 gen_address_mask(dc, cpu_addr);
4203 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4204 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4205 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4206 break;
4207 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4208 case 0x10: /* lda, V9 lduwa, load word alternate */
4209 #ifndef TARGET_SPARC64
4210 if (IS_IMM)
4211 goto illegal_insn;
4212 if (!supervisor(dc))
4213 goto priv_insn;
4214 #endif
4215 save_state(dc, cpu_cond);
4216 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4217 break;
4218 case 0x11: /* lduba, load unsigned byte alternate */
4219 #ifndef TARGET_SPARC64
4220 if (IS_IMM)
4221 goto illegal_insn;
4222 if (!supervisor(dc))
4223 goto priv_insn;
4224 #endif
4225 save_state(dc, cpu_cond);
4226 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4227 break;
4228 case 0x12: /* lduha, load unsigned halfword alternate */
4229 #ifndef TARGET_SPARC64
4230 if (IS_IMM)
4231 goto illegal_insn;
4232 if (!supervisor(dc))
4233 goto priv_insn;
4234 #endif
4235 save_state(dc, cpu_cond);
4236 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4237 break;
4238 case 0x13: /* ldda, load double word alternate */
4239 #ifndef TARGET_SPARC64
4240 if (IS_IMM)
4241 goto illegal_insn;
4242 if (!supervisor(dc))
4243 goto priv_insn;
4244 #endif
4245 if (rd & 1)
4246 goto illegal_insn;
4247 save_state(dc, cpu_cond);
4248 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4249 goto skip_move;
4250 case 0x19: /* ldsba, load signed byte alternate */
4251 #ifndef TARGET_SPARC64
4252 if (IS_IMM)
4253 goto illegal_insn;
4254 if (!supervisor(dc))
4255 goto priv_insn;
4256 #endif
4257 save_state(dc, cpu_cond);
4258 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4259 break;
4260 case 0x1a: /* ldsha, load signed halfword alternate */
4261 #ifndef TARGET_SPARC64
4262 if (IS_IMM)
4263 goto illegal_insn;
4264 if (!supervisor(dc))
4265 goto priv_insn;
4266 #endif
4267 save_state(dc, cpu_cond);
4268 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4269 break;
4270 case 0x1d: /* ldstuba -- XXX: should be atomically */
4271 #ifndef TARGET_SPARC64
4272 if (IS_IMM)
4273 goto illegal_insn;
4274 if (!supervisor(dc))
4275 goto priv_insn;
4276 #endif
4277 save_state(dc, cpu_cond);
4278 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4279 break;
4280 case 0x1f: /* swapa, swap reg with alt. memory. Also
4281 atomically */
4282 CHECK_IU_FEATURE(dc, SWAP);
4283 #ifndef TARGET_SPARC64
4284 if (IS_IMM)
4285 goto illegal_insn;
4286 if (!supervisor(dc))
4287 goto priv_insn;
4288 #endif
4289 save_state(dc, cpu_cond);
4290 gen_movl_reg_TN(rd, cpu_val);
4291 gen_swap_asi(cpu_val, cpu_addr, insn);
4292 break;
4294 #ifndef TARGET_SPARC64
4295 case 0x30: /* ldc */
4296 case 0x31: /* ldcsr */
4297 case 0x33: /* lddc */
4298 goto ncp_insn;
4299 #endif
4300 #endif
4301 #ifdef TARGET_SPARC64
4302 case 0x08: /* V9 ldsw */
4303 gen_address_mask(dc, cpu_addr);
4304 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4305 break;
4306 case 0x0b: /* V9 ldx */
4307 gen_address_mask(dc, cpu_addr);
4308 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4309 break;
4310 case 0x18: /* V9 ldswa */
4311 save_state(dc, cpu_cond);
4312 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4313 break;
4314 case 0x1b: /* V9 ldxa */
4315 save_state(dc, cpu_cond);
4316 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4317 break;
4318 case 0x2d: /* V9 prefetch, no effect */
4319 goto skip_move;
4320 case 0x30: /* V9 ldfa */
4321 save_state(dc, cpu_cond);
4322 gen_ldf_asi(cpu_addr, insn, 4, rd);
4323 goto skip_move;
4324 case 0x33: /* V9 lddfa */
4325 save_state(dc, cpu_cond);
4326 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4327 goto skip_move;
4328 case 0x3d: /* V9 prefetcha, no effect */
4329 goto skip_move;
4330 case 0x32: /* V9 ldqfa */
4331 CHECK_FPU_FEATURE(dc, FLOAT128);
4332 save_state(dc, cpu_cond);
4333 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4334 goto skip_move;
4335 #endif
4336 default:
4337 goto illegal_insn;
4339 gen_movl_TN_reg(rd, cpu_val);
4340 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4341 skip_move: ;
4342 #endif
4343 } else if (xop >= 0x20 && xop < 0x24) {
4344 if (gen_trap_ifnofpu(dc, cpu_cond))
4345 goto jmp_insn;
4346 save_state(dc, cpu_cond);
4347 switch (xop) {
4348 case 0x20: /* ldf, load fpreg */
4349 gen_address_mask(dc, cpu_addr);
4350 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4351 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4352 break;
4353 case 0x21: /* ldfsr, V9 ldxfsr */
4354 #ifdef TARGET_SPARC64
4355 gen_address_mask(dc, cpu_addr);
4356 if (rd == 1) {
4357 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4358 gen_helper_ldxfsr(cpu_tmp64);
4359 } else
4360 #else
4362 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4363 gen_helper_ldfsr(cpu_tmp32);
4365 #endif
4366 break;
4367 case 0x22: /* ldqf, load quad fpreg */
4369 TCGv_i32 r_const;
4371 CHECK_FPU_FEATURE(dc, FLOAT128);
4372 r_const = tcg_const_i32(dc->mem_idx);
4373 gen_helper_ldqf(cpu_addr, r_const);
4374 tcg_temp_free_i32(r_const);
4375 gen_op_store_QT0_fpr(QFPREG(rd));
4377 break;
4378 case 0x23: /* lddf, load double fpreg */
4380 TCGv_i32 r_const;
4382 r_const = tcg_const_i32(dc->mem_idx);
4383 gen_helper_lddf(cpu_addr, r_const);
4384 tcg_temp_free_i32(r_const);
4385 gen_op_store_DT0_fpr(DFPREG(rd));
4387 break;
4388 default:
4389 goto illegal_insn;
4391 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4392 xop == 0xe || xop == 0x1e) {
4393 gen_movl_reg_TN(rd, cpu_val);
4394 switch (xop) {
4395 case 0x4: /* st, store word */
4396 gen_address_mask(dc, cpu_addr);
4397 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4398 break;
4399 case 0x5: /* stb, store byte */
4400 gen_address_mask(dc, cpu_addr);
4401 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4402 break;
4403 case 0x6: /* sth, store halfword */
4404 gen_address_mask(dc, cpu_addr);
4405 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4406 break;
4407 case 0x7: /* std, store double word */
4408 if (rd & 1)
4409 goto illegal_insn;
4410 else {
4411 TCGv_i32 r_const;
4413 save_state(dc, cpu_cond);
4414 gen_address_mask(dc, cpu_addr);
4415 r_const = tcg_const_i32(7);
4416 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4417 tcg_temp_free_i32(r_const);
4418 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4419 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4420 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4422 break;
4423 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4424 case 0x14: /* sta, V9 stwa, store word alternate */
4425 #ifndef TARGET_SPARC64
4426 if (IS_IMM)
4427 goto illegal_insn;
4428 if (!supervisor(dc))
4429 goto priv_insn;
4430 #endif
4431 save_state(dc, cpu_cond);
4432 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4433 break;
4434 case 0x15: /* stba, store byte alternate */
4435 #ifndef TARGET_SPARC64
4436 if (IS_IMM)
4437 goto illegal_insn;
4438 if (!supervisor(dc))
4439 goto priv_insn;
4440 #endif
4441 save_state(dc, cpu_cond);
4442 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4443 break;
4444 case 0x16: /* stha, store halfword alternate */
4445 #ifndef TARGET_SPARC64
4446 if (IS_IMM)
4447 goto illegal_insn;
4448 if (!supervisor(dc))
4449 goto priv_insn;
4450 #endif
4451 save_state(dc, cpu_cond);
4452 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4453 break;
4454 case 0x17: /* stda, store double word alternate */
4455 #ifndef TARGET_SPARC64
4456 if (IS_IMM)
4457 goto illegal_insn;
4458 if (!supervisor(dc))
4459 goto priv_insn;
4460 #endif
4461 if (rd & 1)
4462 goto illegal_insn;
4463 else {
4464 save_state(dc, cpu_cond);
4465 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4467 break;
4468 #endif
4469 #ifdef TARGET_SPARC64
4470 case 0x0e: /* V9 stx */
4471 gen_address_mask(dc, cpu_addr);
4472 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4473 break;
4474 case 0x1e: /* V9 stxa */
4475 save_state(dc, cpu_cond);
4476 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4477 break;
4478 #endif
4479 default:
4480 goto illegal_insn;
4482 } else if (xop > 0x23 && xop < 0x28) {
4483 if (gen_trap_ifnofpu(dc, cpu_cond))
4484 goto jmp_insn;
4485 save_state(dc, cpu_cond);
4486 switch (xop) {
4487 case 0x24: /* stf, store fpreg */
4488 gen_address_mask(dc, cpu_addr);
4489 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4490 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4491 break;
4492 case 0x25: /* stfsr, V9 stxfsr */
4493 #ifdef TARGET_SPARC64
4494 gen_address_mask(dc, cpu_addr);
4495 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4496 if (rd == 1)
4497 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4498 else
4499 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4500 #else
4501 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4502 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4503 #endif
4504 break;
4505 case 0x26:
4506 #ifdef TARGET_SPARC64
4507 /* V9 stqf, store quad fpreg */
4509 TCGv_i32 r_const;
4511 CHECK_FPU_FEATURE(dc, FLOAT128);
4512 gen_op_load_fpr_QT0(QFPREG(rd));
4513 r_const = tcg_const_i32(dc->mem_idx);
4514 gen_helper_stqf(cpu_addr, r_const);
4515 tcg_temp_free_i32(r_const);
4517 break;
4518 #else /* !TARGET_SPARC64 */
4519 /* stdfq, store floating point queue */
4520 #if defined(CONFIG_USER_ONLY)
4521 goto illegal_insn;
4522 #else
4523 if (!supervisor(dc))
4524 goto priv_insn;
4525 if (gen_trap_ifnofpu(dc, cpu_cond))
4526 goto jmp_insn;
4527 goto nfq_insn;
4528 #endif
4529 #endif
4530 case 0x27: /* stdf, store double fpreg */
4532 TCGv_i32 r_const;
4534 gen_op_load_fpr_DT0(DFPREG(rd));
4535 r_const = tcg_const_i32(dc->mem_idx);
4536 gen_helper_stdf(cpu_addr, r_const);
4537 tcg_temp_free_i32(r_const);
4539 break;
4540 default:
4541 goto illegal_insn;
4543 } else if (xop > 0x33 && xop < 0x3f) {
4544 save_state(dc, cpu_cond);
4545 switch (xop) {
4546 #ifdef TARGET_SPARC64
4547 case 0x34: /* V9 stfa */
4548 gen_stf_asi(cpu_addr, insn, 4, rd);
4549 break;
4550 case 0x36: /* V9 stqfa */
4552 TCGv_i32 r_const;
4554 CHECK_FPU_FEATURE(dc, FLOAT128);
4555 r_const = tcg_const_i32(7);
4556 gen_helper_check_align(cpu_addr, r_const);
4557 tcg_temp_free_i32(r_const);
4558 gen_op_load_fpr_QT0(QFPREG(rd));
4559 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4561 break;
4562 case 0x37: /* V9 stdfa */
4563 gen_op_load_fpr_DT0(DFPREG(rd));
4564 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4565 break;
4566 case 0x3c: /* V9 casa */
4567 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4568 gen_movl_TN_reg(rd, cpu_val);
4569 break;
4570 case 0x3e: /* V9 casxa */
4571 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4572 gen_movl_TN_reg(rd, cpu_val);
4573 break;
4574 #else
4575 case 0x34: /* stc */
4576 case 0x35: /* stcsr */
4577 case 0x36: /* stdcq */
4578 case 0x37: /* stdc */
4579 goto ncp_insn;
4580 #endif
4581 default:
4582 goto illegal_insn;
4584 } else
4585 goto illegal_insn;
4587 break;
4589 /* default case for non jump instructions */
4590 if (dc->npc == DYNAMIC_PC) {
4591 dc->pc = DYNAMIC_PC;
4592 gen_op_next_insn();
4593 } else if (dc->npc == JUMP_PC) {
4594 /* we can do a static jump */
4595 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4596 dc->is_br = 1;
4597 } else {
4598 dc->pc = dc->npc;
4599 dc->npc = dc->npc + 4;
4601 jmp_insn:
4602 return;
4603 illegal_insn:
4605 TCGv_i32 r_const;
4607 save_state(dc, cpu_cond);
4608 r_const = tcg_const_i32(TT_ILL_INSN);
4609 gen_helper_raise_exception(r_const);
4610 tcg_temp_free_i32(r_const);
4611 dc->is_br = 1;
4613 return;
4614 unimp_flush:
4616 TCGv_i32 r_const;
4618 save_state(dc, cpu_cond);
4619 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4620 gen_helper_raise_exception(r_const);
4621 tcg_temp_free_i32(r_const);
4622 dc->is_br = 1;
4624 return;
4625 #if !defined(CONFIG_USER_ONLY)
4626 priv_insn:
4628 TCGv_i32 r_const;
4630 save_state(dc, cpu_cond);
4631 r_const = tcg_const_i32(TT_PRIV_INSN);
4632 gen_helper_raise_exception(r_const);
4633 tcg_temp_free_i32(r_const);
4634 dc->is_br = 1;
4636 return;
4637 #endif
4638 nfpu_insn:
4639 save_state(dc, cpu_cond);
4640 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4641 dc->is_br = 1;
4642 return;
4643 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4644 nfq_insn:
4645 save_state(dc, cpu_cond);
4646 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4647 dc->is_br = 1;
4648 return;
4649 #endif
4650 #ifndef TARGET_SPARC64
4651 ncp_insn:
4653 TCGv r_const;
4655 save_state(dc, cpu_cond);
4656 r_const = tcg_const_i32(TT_NCP_INSN);
4657 gen_helper_raise_exception(r_const);
4658 tcg_temp_free(r_const);
4659 dc->is_br = 1;
4661 return;
4662 #endif
4665 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4666 int spc, CPUSPARCState *env)
4668 target_ulong pc_start, last_pc;
4669 uint16_t *gen_opc_end;
4670 DisasContext dc1, *dc = &dc1;
4671 CPUBreakpoint *bp;
4672 int j, lj = -1;
4673 int num_insns;
4674 int max_insns;
4676 memset(dc, 0, sizeof(DisasContext));
4677 dc->tb = tb;
4678 pc_start = tb->pc;
4679 dc->pc = pc_start;
4680 last_pc = dc->pc;
4681 dc->npc = (target_ulong) tb->cs_base;
4682 dc->cc_op = CC_OP_DYNAMIC;
4683 dc->mem_idx = cpu_mmu_index(env);
4684 dc->def = env->def;
4685 if ((dc->def->features & CPU_FEATURE_FLOAT))
4686 dc->fpu_enabled = cpu_fpu_enabled(env);
4687 else
4688 dc->fpu_enabled = 0;
4689 #ifdef TARGET_SPARC64
4690 dc->address_mask_32bit = env->pstate & PS_AM;
4691 #endif
4692 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4694 cpu_tmp0 = tcg_temp_new();
4695 cpu_tmp32 = tcg_temp_new_i32();
4696 cpu_tmp64 = tcg_temp_new_i64();
4698 cpu_dst = tcg_temp_local_new();
4700 // loads and stores
4701 cpu_val = tcg_temp_local_new();
4702 cpu_addr = tcg_temp_local_new();
4704 num_insns = 0;
4705 max_insns = tb->cflags & CF_COUNT_MASK;
4706 if (max_insns == 0)
4707 max_insns = CF_COUNT_MASK;
4708 gen_icount_start();
4709 do {
4710 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4711 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4712 if (bp->pc == dc->pc) {
4713 if (dc->pc != pc_start)
4714 save_state(dc, cpu_cond);
4715 gen_helper_debug();
4716 tcg_gen_exit_tb(0);
4717 dc->is_br = 1;
4718 goto exit_gen_loop;
4722 if (spc) {
4723 qemu_log("Search PC...\n");
4724 j = gen_opc_ptr - gen_opc_buf;
4725 if (lj < j) {
4726 lj++;
4727 while (lj < j)
4728 gen_opc_instr_start[lj++] = 0;
4729 gen_opc_pc[lj] = dc->pc;
4730 gen_opc_npc[lj] = dc->npc;
4731 gen_opc_instr_start[lj] = 1;
4732 gen_opc_icount[lj] = num_insns;
4735 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4736 gen_io_start();
4737 last_pc = dc->pc;
4738 disas_sparc_insn(dc);
4739 num_insns++;
4741 if (dc->is_br)
4742 break;
4743 /* if the next PC is different, we abort now */
4744 if (dc->pc != (last_pc + 4))
4745 break;
4746 /* if we reach a page boundary, we stop generation so that the
4747 PC of a TT_TFAULT exception is always in the right page */
4748 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4749 break;
4750 /* if single step mode, we generate only one instruction and
4751 generate an exception */
4752 if (env->singlestep_enabled || singlestep) {
4753 tcg_gen_movi_tl(cpu_pc, dc->pc);
4754 tcg_gen_exit_tb(0);
4755 break;
4757 } while ((gen_opc_ptr < gen_opc_end) &&
4758 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4759 num_insns < max_insns);
4761 exit_gen_loop:
4762 tcg_temp_free(cpu_addr);
4763 tcg_temp_free(cpu_val);
4764 tcg_temp_free(cpu_dst);
4765 tcg_temp_free_i64(cpu_tmp64);
4766 tcg_temp_free_i32(cpu_tmp32);
4767 tcg_temp_free(cpu_tmp0);
4768 if (tb->cflags & CF_LAST_IO)
4769 gen_io_end();
4770 if (!dc->is_br) {
4771 if (dc->pc != DYNAMIC_PC &&
4772 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4773 /* static PC and NPC: we can use direct chaining */
4774 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4775 } else {
4776 if (dc->pc != DYNAMIC_PC)
4777 tcg_gen_movi_tl(cpu_pc, dc->pc);
4778 save_npc(dc, cpu_cond);
4779 tcg_gen_exit_tb(0);
4782 gen_icount_end(tb, num_insns);
4783 *gen_opc_ptr = INDEX_op_end;
4784 if (spc) {
4785 j = gen_opc_ptr - gen_opc_buf;
4786 lj++;
4787 while (lj <= j)
4788 gen_opc_instr_start[lj++] = 0;
4789 #if 0
4790 log_page_dump();
4791 #endif
4792 gen_opc_jump_pc[0] = dc->jump_pc[0];
4793 gen_opc_jump_pc[1] = dc->jump_pc[1];
4794 } else {
4795 tb->size = last_pc + 4 - pc_start;
4796 tb->icount = num_insns;
4798 #ifdef DEBUG_DISAS
4799 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4800 qemu_log("--------------\n");
4801 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4802 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4803 qemu_log("\n");
4805 #endif
4808 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4810 gen_intermediate_code_internal(tb, 0, env);
4813 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4815 gen_intermediate_code_internal(tb, 1, env);
4818 void gen_intermediate_code_init(CPUSPARCState *env)
4820 unsigned int i;
4821 static int inited;
4822 static const char * const gregnames[8] = {
4823 NULL, // g0 not used
4824 "g1",
4825 "g2",
4826 "g3",
4827 "g4",
4828 "g5",
4829 "g6",
4830 "g7",
4832 static const char * const fregnames[64] = {
4833 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4834 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4835 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4836 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4837 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4838 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4839 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4840 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4843 /* init various static tables */
4844 if (!inited) {
4845 inited = 1;
4847 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4848 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4849 offsetof(CPUState, regwptr),
4850 "regwptr");
4851 #ifdef TARGET_SPARC64
4852 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4853 "xcc");
4854 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4855 "asi");
4856 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4857 "fprs");
4858 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4859 "gsr");
4860 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4861 offsetof(CPUState, tick_cmpr),
4862 "tick_cmpr");
4863 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4864 offsetof(CPUState, stick_cmpr),
4865 "stick_cmpr");
4866 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4867 offsetof(CPUState, hstick_cmpr),
4868 "hstick_cmpr");
4869 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4870 "hintp");
4871 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4872 "htba");
4873 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4874 "hver");
4875 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4876 offsetof(CPUState, ssr), "ssr");
4877 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4878 offsetof(CPUState, version), "ver");
4879 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4880 offsetof(CPUState, softint),
4881 "softint");
4882 #else
4883 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4884 "wim");
4885 #endif
4886 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4887 "cond");
4888 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4889 "cc_src");
4890 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4891 offsetof(CPUState, cc_src2),
4892 "cc_src2");
4893 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4894 "cc_dst");
4895 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
4896 "cc_op");
4897 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4898 "psr");
4899 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4900 "fsr");
4901 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4902 "pc");
4903 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
4904 "npc");
4905 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
4906 #ifndef CONFIG_USER_ONLY
4907 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
4908 "tbr");
4909 #endif
4910 for (i = 1; i < 8; i++)
4911 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
4912 offsetof(CPUState, gregs[i]),
4913 gregnames[i]);
4914 for (i = 0; i < TARGET_FPREGS; i++)
4915 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
4916 offsetof(CPUState, fpr[i]),
4917 fregnames[i]);
4919 /* register helpers */
4921 #define GEN_HELPER 2
4922 #include "helper.h"
4926 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4927 unsigned long searched_pc, int pc_pos, void *puc)
4929 target_ulong npc;
4930 env->pc = gen_opc_pc[pc_pos];
4931 npc = gen_opc_npc[pc_pos];
4932 if (npc == 1) {
4933 /* dynamic NPC: already stored */
4934 } else if (npc == 2) {
4935 target_ulong t2 = (target_ulong)(unsigned long)puc;
4936 /* jump PC: use T2 and the jump targets of the translation */
4937 if (t2)
4938 env->npc = gen_opc_jump_pc[0];
4939 else
4940 env->npc = gen_opc_jump_pc[1];
4941 } else {
4942 env->npc = npc;
4945 /* flush pending conditional evaluations before exposing cpu state */
4946 if (CC_OP != CC_OP_FLAGS) {
4947 helper_compute_psr();