Musicpal qdev conversion: gpio (except I2C part), keyboard and lcd
[qemu/ar7.git] / target-sparc / translate.c
blob9bbfd3cd2309c71ba0e0cf62b8df48fcd6a64876
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "exec-all.h"
29 #include "disas.h"
30 #include "helper.h"
31 #include "tcg-op.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
36 #define DEBUG_DISAS
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env, cpu_regwptr;
44 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45 static TCGv_i32 cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
81 sparc_def_t *def;
82 } DisasContext;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low LEN bits of X (1 <= LEN <= 32).
 *
 * The left shift is performed on an unsigned value: left-shifting a
 * negative signed int, or shifting a bit into the sign position, is
 * undefined behaviour in C.  The final right shift of a possibly
 * negative int is implementation-defined, but is an arithmetic shift
 * on every compiler QEMU supports.
 */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;

    return (int)((uint32_t)x << shift) >> shift;
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
200 if (AM_CHECK(dc))
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
207 if (reg == 0)
208 tcg_gen_movi_tl(tn, 0);
209 else if (reg < 8)
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211 else {
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
218 if (reg == 0)
219 return;
220 else if (reg < 8)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
222 else {
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
232 tb = s->tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
240 } else {
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb(0);
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
279 TCGv r_temp;
280 TCGv_i32 r_const;
281 int l1;
283 l1 = gen_new_label();
285 r_temp = tcg_temp_new();
286 tcg_gen_xor_tl(r_temp, src1, src2);
287 tcg_gen_not_tl(r_temp, r_temp);
288 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
289 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
290 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
291 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
292 r_const = tcg_const_i32(TT_TOVF);
293 gen_helper_raise_exception(r_const);
294 tcg_temp_free_i32(r_const);
295 gen_set_label(l1);
296 tcg_temp_free(r_temp);
299 static inline void gen_tag_tv(TCGv src1, TCGv src2)
301 int l1;
302 TCGv_i32 r_const;
304 l1 = gen_new_label();
305 tcg_gen_or_tl(cpu_tmp0, src1, src2);
306 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
307 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
308 r_const = tcg_const_i32(TT_TOVF);
309 gen_helper_raise_exception(r_const);
310 tcg_temp_free_i32(r_const);
311 gen_set_label(l1);
314 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
316 tcg_gen_mov_tl(cpu_cc_src, src1);
317 tcg_gen_movi_tl(cpu_cc_src2, src2);
318 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
319 tcg_gen_mov_tl(dst, cpu_cc_dst);
322 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
324 tcg_gen_mov_tl(cpu_cc_src, src1);
325 tcg_gen_mov_tl(cpu_cc_src2, src2);
326 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
327 tcg_gen_mov_tl(dst, cpu_cc_dst);
330 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
332 tcg_gen_mov_tl(cpu_cc_src, src1);
333 tcg_gen_movi_tl(cpu_cc_src2, src2);
334 gen_mov_reg_C(cpu_tmp0, cpu_psr);
335 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
336 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
337 tcg_gen_mov_tl(dst, cpu_cc_dst);
340 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
342 tcg_gen_mov_tl(cpu_cc_src, src1);
343 tcg_gen_mov_tl(cpu_cc_src2, src2);
344 gen_mov_reg_C(cpu_tmp0, cpu_psr);
345 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
346 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
347 tcg_gen_mov_tl(dst, cpu_cc_dst);
350 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
352 tcg_gen_mov_tl(cpu_cc_src, src1);
353 tcg_gen_mov_tl(cpu_cc_src2, src2);
354 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
355 tcg_gen_mov_tl(dst, cpu_cc_dst);
358 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
360 tcg_gen_mov_tl(cpu_cc_src, src1);
361 tcg_gen_mov_tl(cpu_cc_src2, src2);
362 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
363 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
364 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
365 tcg_gen_mov_tl(dst, cpu_cc_dst);
368 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
370 TCGv r_temp;
371 TCGv_i32 r_const;
372 int l1;
374 l1 = gen_new_label();
376 r_temp = tcg_temp_new();
377 tcg_gen_xor_tl(r_temp, src1, src2);
378 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
379 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
380 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
381 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
382 r_const = tcg_const_i32(TT_TOVF);
383 gen_helper_raise_exception(r_const);
384 tcg_temp_free_i32(r_const);
385 gen_set_label(l1);
386 tcg_temp_free(r_temp);
389 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
391 tcg_gen_mov_tl(cpu_cc_src, src1);
392 tcg_gen_movi_tl(cpu_cc_src2, src2);
393 if (src2 == 0) {
394 tcg_gen_mov_tl(cpu_cc_dst, src1);
395 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
396 dc->cc_op = CC_OP_LOGIC;
397 } else {
398 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
399 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
400 dc->cc_op = CC_OP_SUB;
402 tcg_gen_mov_tl(dst, cpu_cc_dst);
405 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
407 tcg_gen_mov_tl(cpu_cc_src, src1);
408 tcg_gen_mov_tl(cpu_cc_src2, src2);
409 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
410 tcg_gen_mov_tl(dst, cpu_cc_dst);
413 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
415 tcg_gen_mov_tl(cpu_cc_src, src1);
416 tcg_gen_movi_tl(cpu_cc_src2, src2);
417 gen_mov_reg_C(cpu_tmp0, cpu_psr);
418 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
419 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
420 tcg_gen_mov_tl(dst, cpu_cc_dst);
423 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
425 tcg_gen_mov_tl(cpu_cc_src, src1);
426 tcg_gen_mov_tl(cpu_cc_src2, src2);
427 gen_mov_reg_C(cpu_tmp0, cpu_psr);
428 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
429 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
430 tcg_gen_mov_tl(dst, cpu_cc_dst);
433 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
435 tcg_gen_mov_tl(cpu_cc_src, src1);
436 tcg_gen_mov_tl(cpu_cc_src2, src2);
437 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
438 tcg_gen_mov_tl(dst, cpu_cc_dst);
441 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
443 tcg_gen_mov_tl(cpu_cc_src, src1);
444 tcg_gen_mov_tl(cpu_cc_src2, src2);
445 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
446 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
447 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 tcg_gen_mov_tl(dst, cpu_cc_dst);
451 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
453 TCGv r_temp;
454 int l1;
456 l1 = gen_new_label();
457 r_temp = tcg_temp_new();
459 /* old op:
460 if (!(env->y & 1))
461 T1 = 0;
463 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
464 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
465 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
466 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
467 tcg_gen_movi_tl(cpu_cc_src2, 0);
468 gen_set_label(l1);
470 // b2 = T0 & 1;
471 // env->y = (b2 << 31) | (env->y >> 1);
472 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
473 tcg_gen_shli_tl(r_temp, r_temp, 31);
474 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
475 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
476 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
477 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
479 // b1 = N ^ V;
480 gen_mov_reg_N(cpu_tmp0, cpu_psr);
481 gen_mov_reg_V(r_temp, cpu_psr);
482 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
483 tcg_temp_free(r_temp);
485 // T0 = (b1 << 31) | (T0 >> 1);
486 // src1 = T0;
487 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
488 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
489 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
491 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
493 tcg_gen_mov_tl(dst, cpu_cc_dst);
496 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
498 TCGv_i64 r_temp, r_temp2;
500 r_temp = tcg_temp_new_i64();
501 r_temp2 = tcg_temp_new_i64();
503 tcg_gen_extu_tl_i64(r_temp, src2);
504 tcg_gen_extu_tl_i64(r_temp2, src1);
505 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
507 tcg_gen_shri_i64(r_temp, r_temp2, 32);
508 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
509 tcg_temp_free_i64(r_temp);
510 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
511 #ifdef TARGET_SPARC64
512 tcg_gen_mov_i64(dst, r_temp2);
513 #else
514 tcg_gen_trunc_i64_tl(dst, r_temp2);
515 #endif
516 tcg_temp_free_i64(r_temp2);
519 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
521 TCGv_i64 r_temp, r_temp2;
523 r_temp = tcg_temp_new_i64();
524 r_temp2 = tcg_temp_new_i64();
526 tcg_gen_ext_tl_i64(r_temp, src2);
527 tcg_gen_ext_tl_i64(r_temp2, src1);
528 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
530 tcg_gen_shri_i64(r_temp, r_temp2, 32);
531 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
532 tcg_temp_free_i64(r_temp);
533 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
534 #ifdef TARGET_SPARC64
535 tcg_gen_mov_i64(dst, r_temp2);
536 #else
537 tcg_gen_trunc_i64_tl(dst, r_temp2);
538 #endif
539 tcg_temp_free_i64(r_temp2);
542 #ifdef TARGET_SPARC64
543 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
545 TCGv_i32 r_const;
546 int l1;
548 l1 = gen_new_label();
549 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
550 r_const = tcg_const_i32(TT_DIV_ZERO);
551 gen_helper_raise_exception(r_const);
552 tcg_temp_free_i32(r_const);
553 gen_set_label(l1);
556 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
558 int l1, l2;
560 l1 = gen_new_label();
561 l2 = gen_new_label();
562 tcg_gen_mov_tl(cpu_cc_src, src1);
563 tcg_gen_mov_tl(cpu_cc_src2, src2);
564 gen_trap_ifdivzero_tl(cpu_cc_src2);
565 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
566 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
567 tcg_gen_movi_i64(dst, INT64_MIN);
568 tcg_gen_br(l2);
569 gen_set_label(l1);
570 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
571 gen_set_label(l2);
573 #endif
575 // 1
576 static inline void gen_op_eval_ba(TCGv dst)
578 tcg_gen_movi_tl(dst, 1);
581 // Z
582 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
584 gen_mov_reg_Z(dst, src);
587 // Z | (N ^ V)
588 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
590 gen_mov_reg_N(cpu_tmp0, src);
591 gen_mov_reg_V(dst, src);
592 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
593 gen_mov_reg_Z(cpu_tmp0, src);
594 tcg_gen_or_tl(dst, dst, cpu_tmp0);
597 // N ^ V
598 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
600 gen_mov_reg_V(cpu_tmp0, src);
601 gen_mov_reg_N(dst, src);
602 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
605 // C | Z
606 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
608 gen_mov_reg_Z(cpu_tmp0, src);
609 gen_mov_reg_C(dst, src);
610 tcg_gen_or_tl(dst, dst, cpu_tmp0);
613 // C
614 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
616 gen_mov_reg_C(dst, src);
619 // V
620 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
622 gen_mov_reg_V(dst, src);
625 // 0
626 static inline void gen_op_eval_bn(TCGv dst)
628 tcg_gen_movi_tl(dst, 0);
631 // N
632 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
634 gen_mov_reg_N(dst, src);
637 // !Z
638 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
640 gen_mov_reg_Z(dst, src);
641 tcg_gen_xori_tl(dst, dst, 0x1);
644 // !(Z | (N ^ V))
645 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
647 gen_mov_reg_N(cpu_tmp0, src);
648 gen_mov_reg_V(dst, src);
649 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
650 gen_mov_reg_Z(cpu_tmp0, src);
651 tcg_gen_or_tl(dst, dst, cpu_tmp0);
652 tcg_gen_xori_tl(dst, dst, 0x1);
655 // !(N ^ V)
656 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
658 gen_mov_reg_V(cpu_tmp0, src);
659 gen_mov_reg_N(dst, src);
660 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
661 tcg_gen_xori_tl(dst, dst, 0x1);
664 // !(C | Z)
665 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
667 gen_mov_reg_Z(cpu_tmp0, src);
668 gen_mov_reg_C(dst, src);
669 tcg_gen_or_tl(dst, dst, cpu_tmp0);
670 tcg_gen_xori_tl(dst, dst, 0x1);
673 // !C
674 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
676 gen_mov_reg_C(dst, src);
677 tcg_gen_xori_tl(dst, dst, 0x1);
680 // !N
681 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
683 gen_mov_reg_N(dst, src);
684 tcg_gen_xori_tl(dst, dst, 0x1);
687 // !V
688 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
690 gen_mov_reg_V(dst, src);
691 tcg_gen_xori_tl(dst, dst, 0x1);
695 FPSR bit field FCC1 | FCC0:
699 3 unordered
701 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
702 unsigned int fcc_offset)
704 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
705 tcg_gen_andi_tl(reg, reg, 0x1);
708 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
709 unsigned int fcc_offset)
711 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
712 tcg_gen_andi_tl(reg, reg, 0x1);
715 // !0: FCC0 | FCC1
716 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
717 unsigned int fcc_offset)
719 gen_mov_reg_FCC0(dst, src, fcc_offset);
720 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
721 tcg_gen_or_tl(dst, dst, cpu_tmp0);
724 // 1 or 2: FCC0 ^ FCC1
725 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
726 unsigned int fcc_offset)
728 gen_mov_reg_FCC0(dst, src, fcc_offset);
729 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
730 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
733 // 1 or 3: FCC0
734 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
735 unsigned int fcc_offset)
737 gen_mov_reg_FCC0(dst, src, fcc_offset);
740 // 1: FCC0 & !FCC1
741 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
742 unsigned int fcc_offset)
744 gen_mov_reg_FCC0(dst, src, fcc_offset);
745 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
746 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
747 tcg_gen_and_tl(dst, dst, cpu_tmp0);
750 // 2 or 3: FCC1
751 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
752 unsigned int fcc_offset)
754 gen_mov_reg_FCC1(dst, src, fcc_offset);
757 // 2: !FCC0 & FCC1
758 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
759 unsigned int fcc_offset)
761 gen_mov_reg_FCC0(dst, src, fcc_offset);
762 tcg_gen_xori_tl(dst, dst, 0x1);
763 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
764 tcg_gen_and_tl(dst, dst, cpu_tmp0);
767 // 3: FCC0 & FCC1
768 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
769 unsigned int fcc_offset)
771 gen_mov_reg_FCC0(dst, src, fcc_offset);
772 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
773 tcg_gen_and_tl(dst, dst, cpu_tmp0);
776 // 0: !(FCC0 | FCC1)
777 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
778 unsigned int fcc_offset)
780 gen_mov_reg_FCC0(dst, src, fcc_offset);
781 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
782 tcg_gen_or_tl(dst, dst, cpu_tmp0);
783 tcg_gen_xori_tl(dst, dst, 0x1);
786 // 0 or 3: !(FCC0 ^ FCC1)
787 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
788 unsigned int fcc_offset)
790 gen_mov_reg_FCC0(dst, src, fcc_offset);
791 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
792 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
793 tcg_gen_xori_tl(dst, dst, 0x1);
796 // 0 or 2: !FCC0
797 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
798 unsigned int fcc_offset)
800 gen_mov_reg_FCC0(dst, src, fcc_offset);
801 tcg_gen_xori_tl(dst, dst, 0x1);
804 // !1: !(FCC0 & !FCC1)
805 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
806 unsigned int fcc_offset)
808 gen_mov_reg_FCC0(dst, src, fcc_offset);
809 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
810 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
811 tcg_gen_and_tl(dst, dst, cpu_tmp0);
812 tcg_gen_xori_tl(dst, dst, 0x1);
815 // 0 or 1: !FCC1
816 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
817 unsigned int fcc_offset)
819 gen_mov_reg_FCC1(dst, src, fcc_offset);
820 tcg_gen_xori_tl(dst, dst, 0x1);
823 // !2: !(!FCC0 & FCC1)
824 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
825 unsigned int fcc_offset)
827 gen_mov_reg_FCC0(dst, src, fcc_offset);
828 tcg_gen_xori_tl(dst, dst, 0x1);
829 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
830 tcg_gen_and_tl(dst, dst, cpu_tmp0);
831 tcg_gen_xori_tl(dst, dst, 0x1);
834 // !3: !(FCC0 & FCC1)
835 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
836 unsigned int fcc_offset)
838 gen_mov_reg_FCC0(dst, src, fcc_offset);
839 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
840 tcg_gen_and_tl(dst, dst, cpu_tmp0);
841 tcg_gen_xori_tl(dst, dst, 0x1);
844 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
845 target_ulong pc2, TCGv r_cond)
847 int l1;
849 l1 = gen_new_label();
851 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
853 gen_goto_tb(dc, 0, pc1, pc1 + 4);
855 gen_set_label(l1);
856 gen_goto_tb(dc, 1, pc2, pc2 + 4);
859 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
860 target_ulong pc2, TCGv r_cond)
862 int l1;
864 l1 = gen_new_label();
866 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
868 gen_goto_tb(dc, 0, pc2, pc1);
870 gen_set_label(l1);
871 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
874 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
875 TCGv r_cond)
877 int l1, l2;
879 l1 = gen_new_label();
880 l2 = gen_new_label();
882 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
884 tcg_gen_movi_tl(cpu_npc, npc1);
885 tcg_gen_br(l2);
887 gen_set_label(l1);
888 tcg_gen_movi_tl(cpu_npc, npc2);
889 gen_set_label(l2);
892 /* call this function before using the condition register as it may
893 have been set for a jump */
894 static inline void flush_cond(DisasContext *dc, TCGv cond)
896 if (dc->npc == JUMP_PC) {
897 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
898 dc->npc = DYNAMIC_PC;
902 static inline void save_npc(DisasContext *dc, TCGv cond)
904 if (dc->npc == JUMP_PC) {
905 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
906 dc->npc = DYNAMIC_PC;
907 } else if (dc->npc != DYNAMIC_PC) {
908 tcg_gen_movi_tl(cpu_npc, dc->npc);
912 static inline void save_state(DisasContext *dc, TCGv cond)
914 tcg_gen_movi_tl(cpu_pc, dc->pc);
915 /* flush pending conditional evaluations before exposing cpu state */
916 if (dc->cc_op != CC_OP_FLAGS) {
917 dc->cc_op = CC_OP_FLAGS;
918 gen_helper_compute_psr();
920 save_npc(dc, cond);
923 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
925 if (dc->npc == JUMP_PC) {
926 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
927 tcg_gen_mov_tl(cpu_pc, cpu_npc);
928 dc->pc = DYNAMIC_PC;
929 } else if (dc->npc == DYNAMIC_PC) {
930 tcg_gen_mov_tl(cpu_pc, cpu_npc);
931 dc->pc = DYNAMIC_PC;
932 } else {
933 dc->pc = dc->npc;
937 static inline void gen_op_next_insn(void)
939 tcg_gen_mov_tl(cpu_pc, cpu_npc);
940 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
943 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
944 DisasContext *dc)
946 TCGv_i32 r_src;
948 #ifdef TARGET_SPARC64
949 if (cc)
950 r_src = cpu_xcc;
951 else
952 r_src = cpu_psr;
953 #else
954 r_src = cpu_psr;
955 #endif
956 switch (dc->cc_op) {
957 case CC_OP_FLAGS:
958 break;
959 default:
960 gen_helper_compute_psr();
961 dc->cc_op = CC_OP_FLAGS;
962 break;
964 switch (cond) {
965 case 0x0:
966 gen_op_eval_bn(r_dst);
967 break;
968 case 0x1:
969 gen_op_eval_be(r_dst, r_src);
970 break;
971 case 0x2:
972 gen_op_eval_ble(r_dst, r_src);
973 break;
974 case 0x3:
975 gen_op_eval_bl(r_dst, r_src);
976 break;
977 case 0x4:
978 gen_op_eval_bleu(r_dst, r_src);
979 break;
980 case 0x5:
981 gen_op_eval_bcs(r_dst, r_src);
982 break;
983 case 0x6:
984 gen_op_eval_bneg(r_dst, r_src);
985 break;
986 case 0x7:
987 gen_op_eval_bvs(r_dst, r_src);
988 break;
989 case 0x8:
990 gen_op_eval_ba(r_dst);
991 break;
992 case 0x9:
993 gen_op_eval_bne(r_dst, r_src);
994 break;
995 case 0xa:
996 gen_op_eval_bg(r_dst, r_src);
997 break;
998 case 0xb:
999 gen_op_eval_bge(r_dst, r_src);
1000 break;
1001 case 0xc:
1002 gen_op_eval_bgu(r_dst, r_src);
1003 break;
1004 case 0xd:
1005 gen_op_eval_bcc(r_dst, r_src);
1006 break;
1007 case 0xe:
1008 gen_op_eval_bpos(r_dst, r_src);
1009 break;
1010 case 0xf:
1011 gen_op_eval_bvc(r_dst, r_src);
1012 break;
1016 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1018 unsigned int offset;
1020 switch (cc) {
1021 default:
1022 case 0x0:
1023 offset = 0;
1024 break;
1025 case 0x1:
1026 offset = 32 - 10;
1027 break;
1028 case 0x2:
1029 offset = 34 - 10;
1030 break;
1031 case 0x3:
1032 offset = 36 - 10;
1033 break;
1036 switch (cond) {
1037 case 0x0:
1038 gen_op_eval_bn(r_dst);
1039 break;
1040 case 0x1:
1041 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1042 break;
1043 case 0x2:
1044 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1045 break;
1046 case 0x3:
1047 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1048 break;
1049 case 0x4:
1050 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1051 break;
1052 case 0x5:
1053 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1054 break;
1055 case 0x6:
1056 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1057 break;
1058 case 0x7:
1059 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1060 break;
1061 case 0x8:
1062 gen_op_eval_ba(r_dst);
1063 break;
1064 case 0x9:
1065 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1066 break;
1067 case 0xa:
1068 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1069 break;
1070 case 0xb:
1071 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1072 break;
1073 case 0xc:
1074 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1075 break;
1076 case 0xd:
1077 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1078 break;
1079 case 0xe:
1080 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1081 break;
1082 case 0xf:
1083 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1084 break;
1088 #ifdef TARGET_SPARC64
1089 // Inverted logic
1090 static const int gen_tcg_cond_reg[8] = {
1092 TCG_COND_NE,
1093 TCG_COND_GT,
1094 TCG_COND_GE,
1096 TCG_COND_EQ,
1097 TCG_COND_LE,
1098 TCG_COND_LT,
1101 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1103 int l1;
1105 l1 = gen_new_label();
1106 tcg_gen_movi_tl(r_dst, 0);
1107 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1108 tcg_gen_movi_tl(r_dst, 1);
1109 gen_set_label(l1);
1111 #endif
1113 /* XXX: potentially incorrect if dynamic npc */
1114 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1115 TCGv r_cond)
1117 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1118 target_ulong target = dc->pc + offset;
1120 if (cond == 0x0) {
1121 /* unconditional not taken */
1122 if (a) {
1123 dc->pc = dc->npc + 4;
1124 dc->npc = dc->pc + 4;
1125 } else {
1126 dc->pc = dc->npc;
1127 dc->npc = dc->pc + 4;
1129 } else if (cond == 0x8) {
1130 /* unconditional taken */
1131 if (a) {
1132 dc->pc = target;
1133 dc->npc = dc->pc + 4;
1134 } else {
1135 dc->pc = dc->npc;
1136 dc->npc = target;
1138 } else {
1139 flush_cond(dc, r_cond);
1140 gen_cond(r_cond, cc, cond, dc);
1141 if (a) {
1142 gen_branch_a(dc, target, dc->npc, r_cond);
1143 dc->is_br = 1;
1144 } else {
1145 dc->pc = dc->npc;
1146 dc->jump_pc[0] = target;
1147 dc->jump_pc[1] = dc->npc + 4;
1148 dc->npc = JUMP_PC;
1153 /* XXX: potentially incorrect if dynamic npc */
1154 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1155 TCGv r_cond)
1157 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1158 target_ulong target = dc->pc + offset;
1160 if (cond == 0x0) {
1161 /* unconditional not taken */
1162 if (a) {
1163 dc->pc = dc->npc + 4;
1164 dc->npc = dc->pc + 4;
1165 } else {
1166 dc->pc = dc->npc;
1167 dc->npc = dc->pc + 4;
1169 } else if (cond == 0x8) {
1170 /* unconditional taken */
1171 if (a) {
1172 dc->pc = target;
1173 dc->npc = dc->pc + 4;
1174 } else {
1175 dc->pc = dc->npc;
1176 dc->npc = target;
1178 } else {
1179 flush_cond(dc, r_cond);
1180 gen_fcond(r_cond, cc, cond);
1181 if (a) {
1182 gen_branch_a(dc, target, dc->npc, r_cond);
1183 dc->is_br = 1;
1184 } else {
1185 dc->pc = dc->npc;
1186 dc->jump_pc[0] = target;
1187 dc->jump_pc[1] = dc->npc + 4;
1188 dc->npc = JUMP_PC;
1193 #ifdef TARGET_SPARC64
1194 /* XXX: potentially incorrect if dynamic npc */
1195 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1196 TCGv r_cond, TCGv r_reg)
1198 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1199 target_ulong target = dc->pc + offset;
1201 flush_cond(dc, r_cond);
1202 gen_cond_reg(r_cond, cond, r_reg);
1203 if (a) {
1204 gen_branch_a(dc, target, dc->npc, r_cond);
1205 dc->is_br = 1;
1206 } else {
1207 dc->pc = dc->npc;
1208 dc->jump_pc[0] = target;
1209 dc->jump_pc[1] = dc->npc + 4;
1210 dc->npc = JUMP_PC;
1214 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1216 switch (fccno) {
1217 case 0:
1218 gen_helper_fcmps(r_rs1, r_rs2);
1219 break;
1220 case 1:
1221 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1222 break;
1223 case 2:
1224 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1225 break;
1226 case 3:
1227 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1228 break;
/* Double-precision FP compare for fcc<fccno>; operands are the helper's
   implicit FP temporaries (loaded by the caller).  */
static inline void gen_op_fcmpd(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpd();
    } else if (fccno == 1) {
        gen_helper_fcmpd_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpd_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpd_fcc3();
    }
}
/* Quad-precision FP compare for fcc<fccno>; operands are the helper's
   implicit FP temporaries (loaded by the caller).  */
static inline void gen_op_fcmpq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpq();
    } else if (fccno == 1) {
        gen_helper_fcmpq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpq_fcc3();
    }
}
1268 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1270 switch (fccno) {
1271 case 0:
1272 gen_helper_fcmpes(r_rs1, r_rs2);
1273 break;
1274 case 1:
1275 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1276 break;
1277 case 2:
1278 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1279 break;
1280 case 3:
1281 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1282 break;
/* Double-precision FCMPEd (signaling compare) for fcc<fccno>.  */
static inline void gen_op_fcmped(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmped();
    } else if (fccno == 1) {
        gen_helper_fcmped_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmped_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmped_fcc3();
    }
}
/* Quad-precision FCMPEq (signaling compare) for fcc<fccno>.  */
static inline void gen_op_fcmpeq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpeq();
    } else if (fccno == 1) {
        gen_helper_fcmpeq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpeq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpeq_fcc3();
    }
}
1322 #else
1324 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1326 gen_helper_fcmps(r_rs1, r_rs2);
/* Pre-V9 has a single fcc field, so fccno is ignored.  */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* Pre-V9 has a single fcc field, so fccno is ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1339 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1341 gen_helper_fcmpes(r_rs1, r_rs2);
/* Pre-V9 signaling compare; fccno is ignored.  */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* Pre-V9 signaling compare; fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1353 #endif
1355 static inline void gen_op_fpexception_im(int fsr_flags)
1357 TCGv_i32 r_const;
1359 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1360 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1361 r_const = tcg_const_i32(TT_FP_EXCP);
1362 gen_helper_raise_exception(r_const);
1363 tcg_temp_free_i32(r_const);
1366 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1368 #if !defined(CONFIG_USER_ONLY)
1369 if (!dc->fpu_enabled) {
1370 TCGv_i32 r_const;
1372 save_state(dc, r_cond);
1373 r_const = tcg_const_i32(TT_NFPU_INSN);
1374 gen_helper_raise_exception(r_const);
1375 tcg_temp_free_i32(r_const);
1376 dc->is_br = 1;
1377 return 1;
1379 #endif
1380 return 0;
1383 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1385 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat exception flags before an FP operation.  */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1393 /* asi moves */
1394 #ifdef TARGET_SPARC64
1395 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1397 int asi;
1398 TCGv_i32 r_asi;
1400 if (IS_IMM) {
1401 r_asi = tcg_temp_new_i32();
1402 tcg_gen_mov_i32(r_asi, cpu_asi);
1403 } else {
1404 asi = GET_FIELD(insn, 19, 26);
1405 r_asi = tcg_const_i32(asi);
1407 return r_asi;
1410 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1411 int sign)
1413 TCGv_i32 r_asi, r_size, r_sign;
1415 r_asi = gen_get_asi(insn, addr);
1416 r_size = tcg_const_i32(size);
1417 r_sign = tcg_const_i32(sign);
1418 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1419 tcg_temp_free_i32(r_sign);
1420 tcg_temp_free_i32(r_size);
1421 tcg_temp_free_i32(r_asi);
1424 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1426 TCGv_i32 r_asi, r_size;
1428 r_asi = gen_get_asi(insn, addr);
1429 r_size = tcg_const_i32(size);
1430 gen_helper_st_asi(addr, src, r_asi, r_size);
1431 tcg_temp_free_i32(r_size);
1432 tcg_temp_free_i32(r_asi);
1435 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1437 TCGv_i32 r_asi, r_size, r_rd;
1439 r_asi = gen_get_asi(insn, addr);
1440 r_size = tcg_const_i32(size);
1441 r_rd = tcg_const_i32(rd);
1442 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1443 tcg_temp_free_i32(r_rd);
1444 tcg_temp_free_i32(r_size);
1445 tcg_temp_free_i32(r_asi);
1448 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1450 TCGv_i32 r_asi, r_size, r_rd;
1452 r_asi = gen_get_asi(insn, addr);
1453 r_size = tcg_const_i32(size);
1454 r_rd = tcg_const_i32(rd);
1455 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1456 tcg_temp_free_i32(r_rd);
1457 tcg_temp_free_i32(r_size);
1458 tcg_temp_free_i32(r_asi);
1461 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1463 TCGv_i32 r_asi, r_size, r_sign;
1465 r_asi = gen_get_asi(insn, addr);
1466 r_size = tcg_const_i32(4);
1467 r_sign = tcg_const_i32(0);
1468 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1469 tcg_temp_free_i32(r_sign);
1470 gen_helper_st_asi(addr, dst, r_asi, r_size);
1471 tcg_temp_free_i32(r_size);
1472 tcg_temp_free_i32(r_asi);
1473 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1476 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1478 TCGv_i32 r_asi, r_rd;
1480 r_asi = gen_get_asi(insn, addr);
1481 r_rd = tcg_const_i32(rd);
1482 gen_helper_ldda_asi(addr, r_asi, r_rd);
1483 tcg_temp_free_i32(r_rd);
1484 tcg_temp_free_i32(r_asi);
1487 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1489 TCGv_i32 r_asi, r_size;
1491 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1492 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1493 r_asi = gen_get_asi(insn, addr);
1494 r_size = tcg_const_i32(8);
1495 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1496 tcg_temp_free_i32(r_size);
1497 tcg_temp_free_i32(r_asi);
1500 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1501 int rd)
1503 TCGv r_val1;
1504 TCGv_i32 r_asi;
1506 r_val1 = tcg_temp_new();
1507 gen_movl_reg_TN(rd, r_val1);
1508 r_asi = gen_get_asi(insn, addr);
1509 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1510 tcg_temp_free_i32(r_asi);
1511 tcg_temp_free(r_val1);
1514 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1515 int rd)
1517 TCGv_i32 r_asi;
1519 gen_movl_reg_TN(rd, cpu_tmp64);
1520 r_asi = gen_get_asi(insn, addr);
1521 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1522 tcg_temp_free_i32(r_asi);
1525 #elif !defined(CONFIG_USER_ONLY)
1527 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1528 int sign)
1530 TCGv_i32 r_asi, r_size, r_sign;
1532 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1533 r_size = tcg_const_i32(size);
1534 r_sign = tcg_const_i32(sign);
1535 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1536 tcg_temp_free(r_sign);
1537 tcg_temp_free(r_size);
1538 tcg_temp_free(r_asi);
1539 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1542 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1544 TCGv_i32 r_asi, r_size;
1546 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1547 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1548 r_size = tcg_const_i32(size);
1549 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1550 tcg_temp_free(r_size);
1551 tcg_temp_free(r_asi);
1554 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1556 TCGv_i32 r_asi, r_size, r_sign;
1557 TCGv_i64 r_val;
1559 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1560 r_size = tcg_const_i32(4);
1561 r_sign = tcg_const_i32(0);
1562 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1563 tcg_temp_free(r_sign);
1564 r_val = tcg_temp_new_i64();
1565 tcg_gen_extu_tl_i64(r_val, dst);
1566 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1567 tcg_temp_free_i64(r_val);
1568 tcg_temp_free(r_size);
1569 tcg_temp_free(r_asi);
1570 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1573 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1575 TCGv_i32 r_asi, r_size, r_sign;
1577 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1578 r_size = tcg_const_i32(8);
1579 r_sign = tcg_const_i32(0);
1580 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1581 tcg_temp_free(r_sign);
1582 tcg_temp_free(r_size);
1583 tcg_temp_free(r_asi);
1584 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1585 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1586 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1587 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1588 gen_movl_TN_reg(rd, hi);
1591 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1593 TCGv_i32 r_asi, r_size;
1595 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1596 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1597 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1598 r_size = tcg_const_i32(8);
1599 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1600 tcg_temp_free(r_size);
1601 tcg_temp_free(r_asi);
1603 #endif
1605 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1606 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1608 TCGv_i64 r_val;
1609 TCGv_i32 r_asi, r_size;
1611 gen_ld_asi(dst, addr, insn, 1, 0);
1613 r_val = tcg_const_i64(0xffULL);
1614 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1615 r_size = tcg_const_i32(1);
1616 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1617 tcg_temp_free_i32(r_size);
1618 tcg_temp_free_i32(r_asi);
1619 tcg_temp_free_i64(r_val);
1621 #endif
1623 static inline TCGv get_src1(unsigned int insn, TCGv def)
1625 TCGv r_rs1 = def;
1626 unsigned int rs1;
1628 rs1 = GET_FIELD(insn, 13, 17);
1629 if (rs1 == 0)
1630 r_rs1 = tcg_const_tl(0); // XXX how to free?
1631 else if (rs1 < 8)
1632 r_rs1 = cpu_gregs[rs1];
1633 else
1634 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1635 return r_rs1;
1638 static inline TCGv get_src2(unsigned int insn, TCGv def)
1640 TCGv r_rs2 = def;
1642 if (IS_IMM) { /* immediate */
1643 target_long simm;
1645 simm = GET_FIELDs(insn, 19, 31);
1646 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1647 } else { /* register */
1648 unsigned int rs2;
1650 rs2 = GET_FIELD(insn, 27, 31);
1651 if (rs2 == 0)
1652 r_rs2 = tcg_const_tl(0); // XXX how to free?
1653 else if (rs2 < 8)
1654 r_rs2 = cpu_gregs[rs2];
1655 else
1656 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1658 return r_rs2;
1661 #ifdef TARGET_SPARC64
1662 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1664 TCGv r_tl = tcg_temp_new();
1666 /* load env->tl into r_tl */
1668 TCGv_i32 r_tl_tmp = tcg_temp_new_i32();
1669 tcg_gen_ld_i32(r_tl_tmp, cpu_env, offsetof(CPUSPARCState, tl));
1670 tcg_gen_ext_i32_tl(r_tl, r_tl_tmp);
1671 tcg_temp_free_i32(r_tl_tmp);
1674 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1675 tcg_gen_andi_tl(r_tl, r_tl, MAXTL_MASK);
1677 /* calculate offset to current trap state from env->ts, reuse r_tl */
1678 tcg_gen_muli_tl(r_tl, r_tl, sizeof (trap_state));
1679 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1681 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1682 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl);
1684 tcg_temp_free(r_tl);
1686 #endif
/* Bail out to the function-local error labels when the CPU model lacks
   a feature.  Wrapped in do { } while (0) so the expansion is a single
   statement and a following 'else' cannot bind to the macro's 'if'.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                                   \
    do {                                                                \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))           \
            goto illegal_insn;                                          \
    } while (0)
#define CHECK_FPU_FEATURE(dc, FEATURE)                                  \
    do {                                                                \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))           \
            goto nfpu_insn;                                             \
    } while (0)
1695 /* before an instruction, dc->pc must be static */
1696 static void disas_sparc_insn(DisasContext * dc)
1698 unsigned int insn, opc, rs1, rs2, rd;
1699 target_long simm;
1701 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1702 tcg_gen_debug_insn_start(dc->pc);
1703 insn = ldl_code(dc->pc);
1704 opc = GET_FIELD(insn, 0, 1);
1706 rd = GET_FIELD(insn, 2, 6);
1708 cpu_src1 = tcg_temp_new(); // const
1709 cpu_src2 = tcg_temp_new(); // const
1711 switch (opc) {
1712 case 0: /* branches/sethi */
1714 unsigned int xop = GET_FIELD(insn, 7, 9);
1715 int32_t target;
1716 switch (xop) {
1717 #ifdef TARGET_SPARC64
1718 case 0x1: /* V9 BPcc */
1720 int cc;
1722 target = GET_FIELD_SP(insn, 0, 18);
1723 target = sign_extend(target, 18);
1724 target <<= 2;
1725 cc = GET_FIELD_SP(insn, 20, 21);
1726 if (cc == 0)
1727 do_branch(dc, target, insn, 0, cpu_cond);
1728 else if (cc == 2)
1729 do_branch(dc, target, insn, 1, cpu_cond);
1730 else
1731 goto illegal_insn;
1732 goto jmp_insn;
1734 case 0x3: /* V9 BPr */
1736 target = GET_FIELD_SP(insn, 0, 13) |
1737 (GET_FIELD_SP(insn, 20, 21) << 14);
1738 target = sign_extend(target, 16);
1739 target <<= 2;
1740 cpu_src1 = get_src1(insn, cpu_src1);
1741 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1742 goto jmp_insn;
1744 case 0x5: /* V9 FBPcc */
1746 int cc = GET_FIELD_SP(insn, 20, 21);
1747 if (gen_trap_ifnofpu(dc, cpu_cond))
1748 goto jmp_insn;
1749 target = GET_FIELD_SP(insn, 0, 18);
1750 target = sign_extend(target, 19);
1751 target <<= 2;
1752 do_fbranch(dc, target, insn, cc, cpu_cond);
1753 goto jmp_insn;
1755 #else
1756 case 0x7: /* CBN+x */
1758 goto ncp_insn;
1760 #endif
1761 case 0x2: /* BN+x */
1763 target = GET_FIELD(insn, 10, 31);
1764 target = sign_extend(target, 22);
1765 target <<= 2;
1766 do_branch(dc, target, insn, 0, cpu_cond);
1767 goto jmp_insn;
1769 case 0x6: /* FBN+x */
1771 if (gen_trap_ifnofpu(dc, cpu_cond))
1772 goto jmp_insn;
1773 target = GET_FIELD(insn, 10, 31);
1774 target = sign_extend(target, 22);
1775 target <<= 2;
1776 do_fbranch(dc, target, insn, 0, cpu_cond);
1777 goto jmp_insn;
1779 case 0x4: /* SETHI */
1780 if (rd) { // nop
1781 uint32_t value = GET_FIELD(insn, 10, 31);
1782 TCGv r_const;
1784 r_const = tcg_const_tl(value << 10);
1785 gen_movl_TN_reg(rd, r_const);
1786 tcg_temp_free(r_const);
1788 break;
1789 case 0x0: /* UNIMPL */
1790 default:
1791 goto illegal_insn;
1793 break;
1795 break;
1796 case 1: /*CALL*/
1798 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1799 TCGv r_const;
1801 r_const = tcg_const_tl(dc->pc);
1802 gen_movl_TN_reg(15, r_const);
1803 tcg_temp_free(r_const);
1804 target += dc->pc;
1805 gen_mov_pc_npc(dc, cpu_cond);
1806 dc->npc = target;
1808 goto jmp_insn;
1809 case 2: /* FPU & Logical Operations */
1811 unsigned int xop = GET_FIELD(insn, 7, 12);
1812 if (xop == 0x3a) { /* generate trap */
1813 int cond;
1815 cpu_src1 = get_src1(insn, cpu_src1);
1816 if (IS_IMM) {
1817 rs2 = GET_FIELD(insn, 25, 31);
1818 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1819 } else {
1820 rs2 = GET_FIELD(insn, 27, 31);
1821 if (rs2 != 0) {
1822 gen_movl_reg_TN(rs2, cpu_src2);
1823 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1824 } else
1825 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1827 cond = GET_FIELD(insn, 3, 6);
1828 if (cond == 0x8) {
1829 save_state(dc, cpu_cond);
1830 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1831 supervisor(dc))
1832 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1833 else
1834 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1835 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1836 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1837 gen_helper_raise_exception(cpu_tmp32);
1838 } else if (cond != 0) {
1839 TCGv r_cond = tcg_temp_new();
1840 int l1;
1841 #ifdef TARGET_SPARC64
1842 /* V9 icc/xcc */
1843 int cc = GET_FIELD_SP(insn, 11, 12);
1845 save_state(dc, cpu_cond);
1846 if (cc == 0)
1847 gen_cond(r_cond, 0, cond, dc);
1848 else if (cc == 2)
1849 gen_cond(r_cond, 1, cond, dc);
1850 else
1851 goto illegal_insn;
1852 #else
1853 save_state(dc, cpu_cond);
1854 gen_cond(r_cond, 0, cond, dc);
1855 #endif
1856 l1 = gen_new_label();
1857 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1859 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1860 supervisor(dc))
1861 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1862 else
1863 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1864 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1865 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1866 gen_helper_raise_exception(cpu_tmp32);
1868 gen_set_label(l1);
1869 tcg_temp_free(r_cond);
1871 gen_op_next_insn();
1872 tcg_gen_exit_tb(0);
1873 dc->is_br = 1;
1874 goto jmp_insn;
1875 } else if (xop == 0x28) {
1876 rs1 = GET_FIELD(insn, 13, 17);
1877 switch(rs1) {
1878 case 0: /* rdy */
1879 #ifndef TARGET_SPARC64
1880 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1881 manual, rdy on the microSPARC
1882 II */
1883 case 0x0f: /* stbar in the SPARCv8 manual,
1884 rdy on the microSPARC II */
1885 case 0x10 ... 0x1f: /* implementation-dependent in the
1886 SPARCv8 manual, rdy on the
1887 microSPARC II */
1888 #endif
1889 gen_movl_TN_reg(rd, cpu_y);
1890 break;
1891 #ifdef TARGET_SPARC64
1892 case 0x2: /* V9 rdccr */
1893 gen_helper_compute_psr();
1894 gen_helper_rdccr(cpu_dst);
1895 gen_movl_TN_reg(rd, cpu_dst);
1896 break;
1897 case 0x3: /* V9 rdasi */
1898 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1899 gen_movl_TN_reg(rd, cpu_dst);
1900 break;
1901 case 0x4: /* V9 rdtick */
1903 TCGv_ptr r_tickptr;
1905 r_tickptr = tcg_temp_new_ptr();
1906 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1907 offsetof(CPUState, tick));
1908 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1909 tcg_temp_free_ptr(r_tickptr);
1910 gen_movl_TN_reg(rd, cpu_dst);
1912 break;
1913 case 0x5: /* V9 rdpc */
1915 TCGv r_const;
1917 r_const = tcg_const_tl(dc->pc);
1918 gen_movl_TN_reg(rd, r_const);
1919 tcg_temp_free(r_const);
1921 break;
1922 case 0x6: /* V9 rdfprs */
1923 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1924 gen_movl_TN_reg(rd, cpu_dst);
1925 break;
1926 case 0xf: /* V9 membar */
1927 break; /* no effect */
1928 case 0x13: /* Graphics Status */
1929 if (gen_trap_ifnofpu(dc, cpu_cond))
1930 goto jmp_insn;
1931 gen_movl_TN_reg(rd, cpu_gsr);
1932 break;
1933 case 0x16: /* Softint */
1934 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1935 gen_movl_TN_reg(rd, cpu_dst);
1936 break;
1937 case 0x17: /* Tick compare */
1938 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1939 break;
1940 case 0x18: /* System tick */
1942 TCGv_ptr r_tickptr;
1944 r_tickptr = tcg_temp_new_ptr();
1945 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1946 offsetof(CPUState, stick));
1947 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1948 tcg_temp_free_ptr(r_tickptr);
1949 gen_movl_TN_reg(rd, cpu_dst);
1951 break;
1952 case 0x19: /* System tick compare */
1953 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1954 break;
1955 case 0x10: /* Performance Control */
1956 case 0x11: /* Performance Instrumentation Counter */
1957 case 0x12: /* Dispatch Control */
1958 case 0x14: /* Softint set, WO */
1959 case 0x15: /* Softint clear, WO */
1960 #endif
1961 default:
1962 goto illegal_insn;
1964 #if !defined(CONFIG_USER_ONLY)
1965 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1966 #ifndef TARGET_SPARC64
1967 if (!supervisor(dc))
1968 goto priv_insn;
1969 gen_helper_compute_psr();
1970 dc->cc_op = CC_OP_FLAGS;
1971 gen_helper_rdpsr(cpu_dst);
1972 #else
1973 CHECK_IU_FEATURE(dc, HYPV);
1974 if (!hypervisor(dc))
1975 goto priv_insn;
1976 rs1 = GET_FIELD(insn, 13, 17);
1977 switch (rs1) {
1978 case 0: // hpstate
1979 // gen_op_rdhpstate();
1980 break;
1981 case 1: // htstate
1982 // gen_op_rdhtstate();
1983 break;
1984 case 3: // hintp
1985 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1986 break;
1987 case 5: // htba
1988 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1989 break;
1990 case 6: // hver
1991 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1992 break;
1993 case 31: // hstick_cmpr
1994 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
1995 break;
1996 default:
1997 goto illegal_insn;
1999 #endif
2000 gen_movl_TN_reg(rd, cpu_dst);
2001 break;
2002 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2003 if (!supervisor(dc))
2004 goto priv_insn;
2005 #ifdef TARGET_SPARC64
2006 rs1 = GET_FIELD(insn, 13, 17);
2007 switch (rs1) {
2008 case 0: // tpc
2010 TCGv_ptr r_tsptr;
2012 r_tsptr = tcg_temp_new_ptr();
2013 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2014 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2015 offsetof(trap_state, tpc));
2016 tcg_temp_free_ptr(r_tsptr);
2018 break;
2019 case 1: // tnpc
2021 TCGv_ptr r_tsptr;
2023 r_tsptr = tcg_temp_new_ptr();
2024 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2025 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2026 offsetof(trap_state, tnpc));
2027 tcg_temp_free_ptr(r_tsptr);
2029 break;
2030 case 2: // tstate
2032 TCGv_ptr r_tsptr;
2034 r_tsptr = tcg_temp_new_ptr();
2035 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2036 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2037 offsetof(trap_state, tstate));
2038 tcg_temp_free_ptr(r_tsptr);
2040 break;
2041 case 3: // tt
2043 TCGv_ptr r_tsptr;
2045 r_tsptr = tcg_temp_new_ptr();
2046 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2047 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2048 offsetof(trap_state, tt));
2049 tcg_temp_free_ptr(r_tsptr);
2050 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2052 break;
2053 case 4: // tick
2055 TCGv_ptr r_tickptr;
2057 r_tickptr = tcg_temp_new_ptr();
2058 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2059 offsetof(CPUState, tick));
2060 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2061 gen_movl_TN_reg(rd, cpu_tmp0);
2062 tcg_temp_free_ptr(r_tickptr);
2064 break;
2065 case 5: // tba
2066 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2067 break;
2068 case 6: // pstate
2069 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2070 offsetof(CPUSPARCState, pstate));
2071 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2072 break;
2073 case 7: // tl
2074 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2075 offsetof(CPUSPARCState, tl));
2076 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2077 break;
2078 case 8: // pil
2079 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2080 offsetof(CPUSPARCState, psrpil));
2081 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2082 break;
2083 case 9: // cwp
2084 gen_helper_rdcwp(cpu_tmp0);
2085 break;
2086 case 10: // cansave
2087 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2088 offsetof(CPUSPARCState, cansave));
2089 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2090 break;
2091 case 11: // canrestore
2092 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2093 offsetof(CPUSPARCState, canrestore));
2094 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2095 break;
2096 case 12: // cleanwin
2097 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2098 offsetof(CPUSPARCState, cleanwin));
2099 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2100 break;
2101 case 13: // otherwin
2102 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2103 offsetof(CPUSPARCState, otherwin));
2104 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2105 break;
2106 case 14: // wstate
2107 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2108 offsetof(CPUSPARCState, wstate));
2109 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2110 break;
2111 case 16: // UA2005 gl
2112 CHECK_IU_FEATURE(dc, GL);
2113 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2114 offsetof(CPUSPARCState, gl));
2115 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2116 break;
2117 case 26: // UA2005 strand status
2118 CHECK_IU_FEATURE(dc, HYPV);
2119 if (!hypervisor(dc))
2120 goto priv_insn;
2121 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2122 break;
2123 case 31: // ver
2124 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2125 break;
2126 case 15: // fq
2127 default:
2128 goto illegal_insn;
2130 #else
2131 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2132 #endif
2133 gen_movl_TN_reg(rd, cpu_tmp0);
2134 break;
2135 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2136 #ifdef TARGET_SPARC64
2137 save_state(dc, cpu_cond);
2138 gen_helper_flushw();
2139 #else
2140 if (!supervisor(dc))
2141 goto priv_insn;
2142 gen_movl_TN_reg(rd, cpu_tbr);
2143 #endif
2144 break;
2145 #endif
2146 } else if (xop == 0x34) { /* FPU Operations */
2147 if (gen_trap_ifnofpu(dc, cpu_cond))
2148 goto jmp_insn;
2149 gen_op_clear_ieee_excp_and_FTT();
2150 rs1 = GET_FIELD(insn, 13, 17);
2151 rs2 = GET_FIELD(insn, 27, 31);
2152 xop = GET_FIELD(insn, 18, 26);
2153 switch (xop) {
2154 case 0x1: /* fmovs */
2155 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2156 break;
2157 case 0x5: /* fnegs */
2158 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2159 break;
2160 case 0x9: /* fabss */
2161 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2162 break;
2163 case 0x29: /* fsqrts */
2164 CHECK_FPU_FEATURE(dc, FSQRT);
2165 gen_clear_float_exceptions();
2166 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2167 gen_helper_check_ieee_exceptions();
2168 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2169 break;
2170 case 0x2a: /* fsqrtd */
2171 CHECK_FPU_FEATURE(dc, FSQRT);
2172 gen_op_load_fpr_DT1(DFPREG(rs2));
2173 gen_clear_float_exceptions();
2174 gen_helper_fsqrtd();
2175 gen_helper_check_ieee_exceptions();
2176 gen_op_store_DT0_fpr(DFPREG(rd));
2177 break;
2178 case 0x2b: /* fsqrtq */
2179 CHECK_FPU_FEATURE(dc, FLOAT128);
2180 gen_op_load_fpr_QT1(QFPREG(rs2));
2181 gen_clear_float_exceptions();
2182 gen_helper_fsqrtq();
2183 gen_helper_check_ieee_exceptions();
2184 gen_op_store_QT0_fpr(QFPREG(rd));
2185 break;
2186 case 0x41: /* fadds */
2187 gen_clear_float_exceptions();
2188 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2189 gen_helper_check_ieee_exceptions();
2190 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2191 break;
2192 case 0x42: /* faddd */
2193 gen_op_load_fpr_DT0(DFPREG(rs1));
2194 gen_op_load_fpr_DT1(DFPREG(rs2));
2195 gen_clear_float_exceptions();
2196 gen_helper_faddd();
2197 gen_helper_check_ieee_exceptions();
2198 gen_op_store_DT0_fpr(DFPREG(rd));
2199 break;
2200 case 0x43: /* faddq */
2201 CHECK_FPU_FEATURE(dc, FLOAT128);
2202 gen_op_load_fpr_QT0(QFPREG(rs1));
2203 gen_op_load_fpr_QT1(QFPREG(rs2));
2204 gen_clear_float_exceptions();
2205 gen_helper_faddq();
2206 gen_helper_check_ieee_exceptions();
2207 gen_op_store_QT0_fpr(QFPREG(rd));
2208 break;
2209 case 0x45: /* fsubs */
2210 gen_clear_float_exceptions();
2211 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2212 gen_helper_check_ieee_exceptions();
2213 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2214 break;
2215 case 0x46: /* fsubd */
2216 gen_op_load_fpr_DT0(DFPREG(rs1));
2217 gen_op_load_fpr_DT1(DFPREG(rs2));
2218 gen_clear_float_exceptions();
2219 gen_helper_fsubd();
2220 gen_helper_check_ieee_exceptions();
2221 gen_op_store_DT0_fpr(DFPREG(rd));
2222 break;
2223 case 0x47: /* fsubq */
2224 CHECK_FPU_FEATURE(dc, FLOAT128);
2225 gen_op_load_fpr_QT0(QFPREG(rs1));
2226 gen_op_load_fpr_QT1(QFPREG(rs2));
2227 gen_clear_float_exceptions();
2228 gen_helper_fsubq();
2229 gen_helper_check_ieee_exceptions();
2230 gen_op_store_QT0_fpr(QFPREG(rd));
2231 break;
2232 case 0x49: /* fmuls */
2233 CHECK_FPU_FEATURE(dc, FMUL);
2234 gen_clear_float_exceptions();
2235 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2236 gen_helper_check_ieee_exceptions();
2237 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2238 break;
2239 case 0x4a: /* fmuld */
2240 CHECK_FPU_FEATURE(dc, FMUL);
2241 gen_op_load_fpr_DT0(DFPREG(rs1));
2242 gen_op_load_fpr_DT1(DFPREG(rs2));
2243 gen_clear_float_exceptions();
2244 gen_helper_fmuld();
2245 gen_helper_check_ieee_exceptions();
2246 gen_op_store_DT0_fpr(DFPREG(rd));
2247 break;
2248 case 0x4b: /* fmulq */
2249 CHECK_FPU_FEATURE(dc, FLOAT128);
2250 CHECK_FPU_FEATURE(dc, FMUL);
2251 gen_op_load_fpr_QT0(QFPREG(rs1));
2252 gen_op_load_fpr_QT1(QFPREG(rs2));
2253 gen_clear_float_exceptions();
2254 gen_helper_fmulq();
2255 gen_helper_check_ieee_exceptions();
2256 gen_op_store_QT0_fpr(QFPREG(rd));
2257 break;
2258 case 0x4d: /* fdivs */
2259 gen_clear_float_exceptions();
2260 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2261 gen_helper_check_ieee_exceptions();
2262 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2263 break;
2264 case 0x4e: /* fdivd */
2265 gen_op_load_fpr_DT0(DFPREG(rs1));
2266 gen_op_load_fpr_DT1(DFPREG(rs2));
2267 gen_clear_float_exceptions();
2268 gen_helper_fdivd();
2269 gen_helper_check_ieee_exceptions();
2270 gen_op_store_DT0_fpr(DFPREG(rd));
2271 break;
2272 case 0x4f: /* fdivq */
2273 CHECK_FPU_FEATURE(dc, FLOAT128);
2274 gen_op_load_fpr_QT0(QFPREG(rs1));
2275 gen_op_load_fpr_QT1(QFPREG(rs2));
2276 gen_clear_float_exceptions();
2277 gen_helper_fdivq();
2278 gen_helper_check_ieee_exceptions();
2279 gen_op_store_QT0_fpr(QFPREG(rd));
2280 break;
2281 case 0x69: /* fsmuld */
2282 CHECK_FPU_FEATURE(dc, FSMULD);
2283 gen_clear_float_exceptions();
2284 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2285 gen_helper_check_ieee_exceptions();
2286 gen_op_store_DT0_fpr(DFPREG(rd));
2287 break;
2288 case 0x6e: /* fdmulq */
2289 CHECK_FPU_FEATURE(dc, FLOAT128);
2290 gen_op_load_fpr_DT0(DFPREG(rs1));
2291 gen_op_load_fpr_DT1(DFPREG(rs2));
2292 gen_clear_float_exceptions();
2293 gen_helper_fdmulq();
2294 gen_helper_check_ieee_exceptions();
2295 gen_op_store_QT0_fpr(QFPREG(rd));
2296 break;
2297 case 0xc4: /* fitos */
2298 gen_clear_float_exceptions();
2299 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2300 gen_helper_check_ieee_exceptions();
2301 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2302 break;
2303 case 0xc6: /* fdtos */
2304 gen_op_load_fpr_DT1(DFPREG(rs2));
2305 gen_clear_float_exceptions();
2306 gen_helper_fdtos(cpu_tmp32);
2307 gen_helper_check_ieee_exceptions();
2308 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2309 break;
2310 case 0xc7: /* fqtos */
2311 CHECK_FPU_FEATURE(dc, FLOAT128);
2312 gen_op_load_fpr_QT1(QFPREG(rs2));
2313 gen_clear_float_exceptions();
2314 gen_helper_fqtos(cpu_tmp32);
2315 gen_helper_check_ieee_exceptions();
2316 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2317 break;
2318 case 0xc8: /* fitod */
2319 gen_helper_fitod(cpu_fpr[rs2]);
2320 gen_op_store_DT0_fpr(DFPREG(rd));
2321 break;
2322 case 0xc9: /* fstod */
2323 gen_helper_fstod(cpu_fpr[rs2]);
2324 gen_op_store_DT0_fpr(DFPREG(rd));
2325 break;
2326 case 0xcb: /* fqtod */
2327 CHECK_FPU_FEATURE(dc, FLOAT128);
2328 gen_op_load_fpr_QT1(QFPREG(rs2));
2329 gen_clear_float_exceptions();
2330 gen_helper_fqtod();
2331 gen_helper_check_ieee_exceptions();
2332 gen_op_store_DT0_fpr(DFPREG(rd));
2333 break;
2334 case 0xcc: /* fitoq */
2335 CHECK_FPU_FEATURE(dc, FLOAT128);
2336 gen_helper_fitoq(cpu_fpr[rs2]);
2337 gen_op_store_QT0_fpr(QFPREG(rd));
2338 break;
2339 case 0xcd: /* fstoq */
2340 CHECK_FPU_FEATURE(dc, FLOAT128);
2341 gen_helper_fstoq(cpu_fpr[rs2]);
2342 gen_op_store_QT0_fpr(QFPREG(rd));
2343 break;
2344 case 0xce: /* fdtoq */
2345 CHECK_FPU_FEATURE(dc, FLOAT128);
2346 gen_op_load_fpr_DT1(DFPREG(rs2));
2347 gen_helper_fdtoq();
2348 gen_op_store_QT0_fpr(QFPREG(rd));
2349 break;
2350 case 0xd1: /* fstoi */
2351 gen_clear_float_exceptions();
2352 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2353 gen_helper_check_ieee_exceptions();
2354 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2355 break;
2356 case 0xd2: /* fdtoi */
2357 gen_op_load_fpr_DT1(DFPREG(rs2));
2358 gen_clear_float_exceptions();
2359 gen_helper_fdtoi(cpu_tmp32);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2362 break;
2363 case 0xd3: /* fqtoi */
2364 CHECK_FPU_FEATURE(dc, FLOAT128);
2365 gen_op_load_fpr_QT1(QFPREG(rs2));
2366 gen_clear_float_exceptions();
2367 gen_helper_fqtoi(cpu_tmp32);
2368 gen_helper_check_ieee_exceptions();
2369 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2370 break;
2371 #ifdef TARGET_SPARC64
2372 case 0x2: /* V9 fmovd */
2373 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2374 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2375 cpu_fpr[DFPREG(rs2) + 1]);
2376 break;
2377 case 0x3: /* V9 fmovq */
2378 CHECK_FPU_FEATURE(dc, FLOAT128);
2379 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2380 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2381 cpu_fpr[QFPREG(rs2) + 1]);
2382 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2383 cpu_fpr[QFPREG(rs2) + 2]);
2384 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2385 cpu_fpr[QFPREG(rs2) + 3]);
2386 break;
2387 case 0x6: /* V9 fnegd */
2388 gen_op_load_fpr_DT1(DFPREG(rs2));
2389 gen_helper_fnegd();
2390 gen_op_store_DT0_fpr(DFPREG(rd));
2391 break;
2392 case 0x7: /* V9 fnegq */
2393 CHECK_FPU_FEATURE(dc, FLOAT128);
2394 gen_op_load_fpr_QT1(QFPREG(rs2));
2395 gen_helper_fnegq();
2396 gen_op_store_QT0_fpr(QFPREG(rd));
2397 break;
2398 case 0xa: /* V9 fabsd */
2399 gen_op_load_fpr_DT1(DFPREG(rs2));
2400 gen_helper_fabsd();
2401 gen_op_store_DT0_fpr(DFPREG(rd));
2402 break;
2403 case 0xb: /* V9 fabsq */
2404 CHECK_FPU_FEATURE(dc, FLOAT128);
2405 gen_op_load_fpr_QT1(QFPREG(rs2));
2406 gen_helper_fabsq();
2407 gen_op_store_QT0_fpr(QFPREG(rd));
2408 break;
2409 case 0x81: /* V9 fstox */
2410 gen_clear_float_exceptions();
2411 gen_helper_fstox(cpu_fpr[rs2]);
2412 gen_helper_check_ieee_exceptions();
2413 gen_op_store_DT0_fpr(DFPREG(rd));
2414 break;
2415 case 0x82: /* V9 fdtox */
2416 gen_op_load_fpr_DT1(DFPREG(rs2));
2417 gen_clear_float_exceptions();
2418 gen_helper_fdtox();
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_DT0_fpr(DFPREG(rd));
2421 break;
2422 case 0x83: /* V9 fqtox */
2423 CHECK_FPU_FEATURE(dc, FLOAT128);
2424 gen_op_load_fpr_QT1(QFPREG(rs2));
2425 gen_clear_float_exceptions();
2426 gen_helper_fqtox();
2427 gen_helper_check_ieee_exceptions();
2428 gen_op_store_DT0_fpr(DFPREG(rd));
2429 break;
2430 case 0x84: /* V9 fxtos */
2431 gen_op_load_fpr_DT1(DFPREG(rs2));
2432 gen_clear_float_exceptions();
2433 gen_helper_fxtos(cpu_tmp32);
2434 gen_helper_check_ieee_exceptions();
2435 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2436 break;
2437 case 0x88: /* V9 fxtod */
2438 gen_op_load_fpr_DT1(DFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 gen_helper_fxtod();
2441 gen_helper_check_ieee_exceptions();
2442 gen_op_store_DT0_fpr(DFPREG(rd));
2443 break;
2444 case 0x8c: /* V9 fxtoq */
2445 CHECK_FPU_FEATURE(dc, FLOAT128);
2446 gen_op_load_fpr_DT1(DFPREG(rs2));
2447 gen_clear_float_exceptions();
2448 gen_helper_fxtoq();
2449 gen_helper_check_ieee_exceptions();
2450 gen_op_store_QT0_fpr(QFPREG(rd));
2451 break;
2452 #endif
2453 default:
2454 goto illegal_insn;
2456 } else if (xop == 0x35) { /* FPU Operations */
2457 #ifdef TARGET_SPARC64
2458 int cond;
2459 #endif
2460 if (gen_trap_ifnofpu(dc, cpu_cond))
2461 goto jmp_insn;
2462 gen_op_clear_ieee_excp_and_FTT();
2463 rs1 = GET_FIELD(insn, 13, 17);
2464 rs2 = GET_FIELD(insn, 27, 31);
2465 xop = GET_FIELD(insn, 18, 26);
2466 #ifdef TARGET_SPARC64
2467 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2468 int l1;
2470 l1 = gen_new_label();
2471 cond = GET_FIELD_SP(insn, 14, 17);
2472 cpu_src1 = get_src1(insn, cpu_src1);
2473 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2474 0, l1);
2475 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2476 gen_set_label(l1);
2477 break;
2478 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2479 int l1;
2481 l1 = gen_new_label();
2482 cond = GET_FIELD_SP(insn, 14, 17);
2483 cpu_src1 = get_src1(insn, cpu_src1);
2484 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2485 0, l1);
2486 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2487 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2488 gen_set_label(l1);
2489 break;
2490 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2491 int l1;
2493 CHECK_FPU_FEATURE(dc, FLOAT128);
2494 l1 = gen_new_label();
2495 cond = GET_FIELD_SP(insn, 14, 17);
2496 cpu_src1 = get_src1(insn, cpu_src1);
2497 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2498 0, l1);
2499 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2500 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2501 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2502 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2503 gen_set_label(l1);
2504 break;
2506 #endif
2507 switch (xop) {
2508 #ifdef TARGET_SPARC64
2509 #define FMOVSCC(fcc) \
2511 TCGv r_cond; \
2512 int l1; \
2514 l1 = gen_new_label(); \
2515 r_cond = tcg_temp_new(); \
2516 cond = GET_FIELD_SP(insn, 14, 17); \
2517 gen_fcond(r_cond, fcc, cond); \
2518 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2519 0, l1); \
2520 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2521 gen_set_label(l1); \
2522 tcg_temp_free(r_cond); \
2524 #define FMOVDCC(fcc) \
2526 TCGv r_cond; \
2527 int l1; \
2529 l1 = gen_new_label(); \
2530 r_cond = tcg_temp_new(); \
2531 cond = GET_FIELD_SP(insn, 14, 17); \
2532 gen_fcond(r_cond, fcc, cond); \
2533 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2534 0, l1); \
2535 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2536 cpu_fpr[DFPREG(rs2)]); \
2537 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2538 cpu_fpr[DFPREG(rs2) + 1]); \
2539 gen_set_label(l1); \
2540 tcg_temp_free(r_cond); \
2542 #define FMOVQCC(fcc) \
2544 TCGv r_cond; \
2545 int l1; \
2547 l1 = gen_new_label(); \
2548 r_cond = tcg_temp_new(); \
2549 cond = GET_FIELD_SP(insn, 14, 17); \
2550 gen_fcond(r_cond, fcc, cond); \
2551 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2552 0, l1); \
2553 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2554 cpu_fpr[QFPREG(rs2)]); \
2555 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2556 cpu_fpr[QFPREG(rs2) + 1]); \
2557 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2558 cpu_fpr[QFPREG(rs2) + 2]); \
2559 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2560 cpu_fpr[QFPREG(rs2) + 3]); \
2561 gen_set_label(l1); \
2562 tcg_temp_free(r_cond); \
2564 case 0x001: /* V9 fmovscc %fcc0 */
2565 FMOVSCC(0);
2566 break;
2567 case 0x002: /* V9 fmovdcc %fcc0 */
2568 FMOVDCC(0);
2569 break;
2570 case 0x003: /* V9 fmovqcc %fcc0 */
2571 CHECK_FPU_FEATURE(dc, FLOAT128);
2572 FMOVQCC(0);
2573 break;
2574 case 0x041: /* V9 fmovscc %fcc1 */
2575 FMOVSCC(1);
2576 break;
2577 case 0x042: /* V9 fmovdcc %fcc1 */
2578 FMOVDCC(1);
2579 break;
2580 case 0x043: /* V9 fmovqcc %fcc1 */
2581 CHECK_FPU_FEATURE(dc, FLOAT128);
2582 FMOVQCC(1);
2583 break;
2584 case 0x081: /* V9 fmovscc %fcc2 */
2585 FMOVSCC(2);
2586 break;
2587 case 0x082: /* V9 fmovdcc %fcc2 */
2588 FMOVDCC(2);
2589 break;
2590 case 0x083: /* V9 fmovqcc %fcc2 */
2591 CHECK_FPU_FEATURE(dc, FLOAT128);
2592 FMOVQCC(2);
2593 break;
2594 case 0x0c1: /* V9 fmovscc %fcc3 */
2595 FMOVSCC(3);
2596 break;
2597 case 0x0c2: /* V9 fmovdcc %fcc3 */
2598 FMOVDCC(3);
2599 break;
2600 case 0x0c3: /* V9 fmovqcc %fcc3 */
2601 CHECK_FPU_FEATURE(dc, FLOAT128);
2602 FMOVQCC(3);
2603 break;
2604 #undef FMOVSCC
2605 #undef FMOVDCC
2606 #undef FMOVQCC
2607 #define FMOVSCC(icc) \
2609 TCGv r_cond; \
2610 int l1; \
2612 l1 = gen_new_label(); \
2613 r_cond = tcg_temp_new(); \
2614 cond = GET_FIELD_SP(insn, 14, 17); \
2615 gen_cond(r_cond, icc, cond, dc); \
2616 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2617 0, l1); \
2618 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2619 gen_set_label(l1); \
2620 tcg_temp_free(r_cond); \
2622 #define FMOVDCC(icc) \
2624 TCGv r_cond; \
2625 int l1; \
2627 l1 = gen_new_label(); \
2628 r_cond = tcg_temp_new(); \
2629 cond = GET_FIELD_SP(insn, 14, 17); \
2630 gen_cond(r_cond, icc, cond, dc); \
2631 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2632 0, l1); \
2633 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2634 cpu_fpr[DFPREG(rs2)]); \
2635 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2636 cpu_fpr[DFPREG(rs2) + 1]); \
2637 gen_set_label(l1); \
2638 tcg_temp_free(r_cond); \
2640 #define FMOVQCC(icc) \
2642 TCGv r_cond; \
2643 int l1; \
2645 l1 = gen_new_label(); \
2646 r_cond = tcg_temp_new(); \
2647 cond = GET_FIELD_SP(insn, 14, 17); \
2648 gen_cond(r_cond, icc, cond, dc); \
2649 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2650 0, l1); \
2651 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2652 cpu_fpr[QFPREG(rs2)]); \
2653 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2654 cpu_fpr[QFPREG(rs2) + 1]); \
2655 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2656 cpu_fpr[QFPREG(rs2) + 2]); \
2657 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2658 cpu_fpr[QFPREG(rs2) + 3]); \
2659 gen_set_label(l1); \
2660 tcg_temp_free(r_cond); \
2663 case 0x101: /* V9 fmovscc %icc */
2664 FMOVSCC(0);
2665 break;
2666 case 0x102: /* V9 fmovdcc %icc */
2667 FMOVDCC(0);
2668 case 0x103: /* V9 fmovqcc %icc */
2669 CHECK_FPU_FEATURE(dc, FLOAT128);
2670 FMOVQCC(0);
2671 break;
2672 case 0x181: /* V9 fmovscc %xcc */
2673 FMOVSCC(1);
2674 break;
2675 case 0x182: /* V9 fmovdcc %xcc */
2676 FMOVDCC(1);
2677 break;
2678 case 0x183: /* V9 fmovqcc %xcc */
2679 CHECK_FPU_FEATURE(dc, FLOAT128);
2680 FMOVQCC(1);
2681 break;
2682 #undef FMOVSCC
2683 #undef FMOVDCC
2684 #undef FMOVQCC
2685 #endif
2686 case 0x51: /* fcmps, V9 %fcc */
2687 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2688 break;
2689 case 0x52: /* fcmpd, V9 %fcc */
2690 gen_op_load_fpr_DT0(DFPREG(rs1));
2691 gen_op_load_fpr_DT1(DFPREG(rs2));
2692 gen_op_fcmpd(rd & 3);
2693 break;
2694 case 0x53: /* fcmpq, V9 %fcc */
2695 CHECK_FPU_FEATURE(dc, FLOAT128);
2696 gen_op_load_fpr_QT0(QFPREG(rs1));
2697 gen_op_load_fpr_QT1(QFPREG(rs2));
2698 gen_op_fcmpq(rd & 3);
2699 break;
2700 case 0x55: /* fcmpes, V9 %fcc */
2701 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2702 break;
2703 case 0x56: /* fcmped, V9 %fcc */
2704 gen_op_load_fpr_DT0(DFPREG(rs1));
2705 gen_op_load_fpr_DT1(DFPREG(rs2));
2706 gen_op_fcmped(rd & 3);
2707 break;
2708 case 0x57: /* fcmpeq, V9 %fcc */
2709 CHECK_FPU_FEATURE(dc, FLOAT128);
2710 gen_op_load_fpr_QT0(QFPREG(rs1));
2711 gen_op_load_fpr_QT1(QFPREG(rs2));
2712 gen_op_fcmpeq(rd & 3);
2713 break;
2714 default:
2715 goto illegal_insn;
2717 } else if (xop == 0x2) {
2718 // clr/mov shortcut
2720 rs1 = GET_FIELD(insn, 13, 17);
2721 if (rs1 == 0) {
2722 // or %g0, x, y -> mov T0, x; mov y, T0
2723 if (IS_IMM) { /* immediate */
2724 TCGv r_const;
2726 simm = GET_FIELDs(insn, 19, 31);
2727 r_const = tcg_const_tl(simm);
2728 gen_movl_TN_reg(rd, r_const);
2729 tcg_temp_free(r_const);
2730 } else { /* register */
2731 rs2 = GET_FIELD(insn, 27, 31);
2732 gen_movl_reg_TN(rs2, cpu_dst);
2733 gen_movl_TN_reg(rd, cpu_dst);
2735 } else {
2736 cpu_src1 = get_src1(insn, cpu_src1);
2737 if (IS_IMM) { /* immediate */
2738 simm = GET_FIELDs(insn, 19, 31);
2739 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2740 gen_movl_TN_reg(rd, cpu_dst);
2741 } else { /* register */
2742 // or x, %g0, y -> mov T1, x; mov y, T1
2743 rs2 = GET_FIELD(insn, 27, 31);
2744 if (rs2 != 0) {
2745 gen_movl_reg_TN(rs2, cpu_src2);
2746 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2747 gen_movl_TN_reg(rd, cpu_dst);
2748 } else
2749 gen_movl_TN_reg(rd, cpu_src1);
2752 #ifdef TARGET_SPARC64
2753 } else if (xop == 0x25) { /* sll, V9 sllx */
2754 cpu_src1 = get_src1(insn, cpu_src1);
2755 if (IS_IMM) { /* immediate */
2756 simm = GET_FIELDs(insn, 20, 31);
2757 if (insn & (1 << 12)) {
2758 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2759 } else {
2760 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2762 } else { /* register */
2763 rs2 = GET_FIELD(insn, 27, 31);
2764 gen_movl_reg_TN(rs2, cpu_src2);
2765 if (insn & (1 << 12)) {
2766 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2767 } else {
2768 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2770 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2772 gen_movl_TN_reg(rd, cpu_dst);
2773 } else if (xop == 0x26) { /* srl, V9 srlx */
2774 cpu_src1 = get_src1(insn, cpu_src1);
2775 if (IS_IMM) { /* immediate */
2776 simm = GET_FIELDs(insn, 20, 31);
2777 if (insn & (1 << 12)) {
2778 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2779 } else {
2780 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2781 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2783 } else { /* register */
2784 rs2 = GET_FIELD(insn, 27, 31);
2785 gen_movl_reg_TN(rs2, cpu_src2);
2786 if (insn & (1 << 12)) {
2787 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2788 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2789 } else {
2790 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2791 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2792 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2795 gen_movl_TN_reg(rd, cpu_dst);
2796 } else if (xop == 0x27) { /* sra, V9 srax */
2797 cpu_src1 = get_src1(insn, cpu_src1);
2798 if (IS_IMM) { /* immediate */
2799 simm = GET_FIELDs(insn, 20, 31);
2800 if (insn & (1 << 12)) {
2801 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2802 } else {
2803 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2804 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2805 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2807 } else { /* register */
2808 rs2 = GET_FIELD(insn, 27, 31);
2809 gen_movl_reg_TN(rs2, cpu_src2);
2810 if (insn & (1 << 12)) {
2811 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2812 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2813 } else {
2814 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2815 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2816 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2817 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2820 gen_movl_TN_reg(rd, cpu_dst);
2821 #endif
2822 } else if (xop < 0x36) {
2823 if (xop < 0x20) {
2824 cpu_src1 = get_src1(insn, cpu_src1);
2825 cpu_src2 = get_src2(insn, cpu_src2);
2826 switch (xop & ~0x10) {
2827 case 0x0: /* add */
2828 if (IS_IMM) {
2829 simm = GET_FIELDs(insn, 19, 31);
2830 if (xop & 0x10) {
2831 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2832 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2833 dc->cc_op = CC_OP_ADD;
2834 } else {
2835 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2837 } else {
2838 if (xop & 0x10) {
2839 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2840 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2841 dc->cc_op = CC_OP_ADD;
2842 } else {
2843 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2846 break;
2847 case 0x1: /* and */
2848 if (IS_IMM) {
2849 simm = GET_FIELDs(insn, 19, 31);
2850 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2851 } else {
2852 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2854 if (xop & 0x10) {
2855 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2856 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2857 dc->cc_op = CC_OP_LOGIC;
2859 break;
2860 case 0x2: /* or */
2861 if (IS_IMM) {
2862 simm = GET_FIELDs(insn, 19, 31);
2863 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2864 } else {
2865 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2867 if (xop & 0x10) {
2868 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2869 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2870 dc->cc_op = CC_OP_LOGIC;
2872 break;
2873 case 0x3: /* xor */
2874 if (IS_IMM) {
2875 simm = GET_FIELDs(insn, 19, 31);
2876 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2877 } else {
2878 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2880 if (xop & 0x10) {
2881 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2882 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2883 dc->cc_op = CC_OP_LOGIC;
2885 break;
2886 case 0x4: /* sub */
2887 if (IS_IMM) {
2888 simm = GET_FIELDs(insn, 19, 31);
2889 if (xop & 0x10) {
2890 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2891 } else {
2892 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2894 } else {
2895 if (xop & 0x10) {
2896 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2897 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2898 dc->cc_op = CC_OP_SUB;
2899 } else {
2900 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2903 break;
2904 case 0x5: /* andn */
2905 if (IS_IMM) {
2906 simm = GET_FIELDs(insn, 19, 31);
2907 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2908 } else {
2909 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2911 if (xop & 0x10) {
2912 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2913 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2914 dc->cc_op = CC_OP_LOGIC;
2916 break;
2917 case 0x6: /* orn */
2918 if (IS_IMM) {
2919 simm = GET_FIELDs(insn, 19, 31);
2920 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2921 } else {
2922 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2924 if (xop & 0x10) {
2925 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2926 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2927 dc->cc_op = CC_OP_LOGIC;
2929 break;
2930 case 0x7: /* xorn */
2931 if (IS_IMM) {
2932 simm = GET_FIELDs(insn, 19, 31);
2933 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2934 } else {
2935 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2936 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2938 if (xop & 0x10) {
2939 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2940 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2941 dc->cc_op = CC_OP_LOGIC;
2943 break;
2944 case 0x8: /* addx, V9 addc */
2945 if (IS_IMM) {
2946 simm = GET_FIELDs(insn, 19, 31);
2947 if (xop & 0x10) {
2948 gen_helper_compute_psr();
2949 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2950 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2951 dc->cc_op = CC_OP_ADDX;
2952 } else {
2953 gen_helper_compute_psr();
2954 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2955 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2956 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2958 } else {
2959 if (xop & 0x10) {
2960 gen_helper_compute_psr();
2961 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2962 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2963 dc->cc_op = CC_OP_ADDX;
2964 } else {
2965 gen_helper_compute_psr();
2966 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2967 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2968 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2971 break;
2972 #ifdef TARGET_SPARC64
2973 case 0x9: /* V9 mulx */
2974 if (IS_IMM) {
2975 simm = GET_FIELDs(insn, 19, 31);
2976 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2977 } else {
2978 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2980 break;
2981 #endif
2982 case 0xa: /* umul */
2983 CHECK_IU_FEATURE(dc, MUL);
2984 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2985 if (xop & 0x10) {
2986 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2987 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2988 dc->cc_op = CC_OP_LOGIC;
2990 break;
2991 case 0xb: /* smul */
2992 CHECK_IU_FEATURE(dc, MUL);
2993 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2994 if (xop & 0x10) {
2995 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2996 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2997 dc->cc_op = CC_OP_LOGIC;
2999 break;
3000 case 0xc: /* subx, V9 subc */
3001 if (IS_IMM) {
3002 simm = GET_FIELDs(insn, 19, 31);
3003 if (xop & 0x10) {
3004 gen_helper_compute_psr();
3005 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3006 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3007 dc->cc_op = CC_OP_SUBX;
3008 } else {
3009 gen_helper_compute_psr();
3010 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3011 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3012 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3014 } else {
3015 if (xop & 0x10) {
3016 gen_helper_compute_psr();
3017 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3018 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3019 dc->cc_op = CC_OP_SUBX;
3020 } else {
3021 gen_helper_compute_psr();
3022 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3023 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3024 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3027 break;
3028 #ifdef TARGET_SPARC64
3029 case 0xd: /* V9 udivx */
3030 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3031 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3032 gen_trap_ifdivzero_tl(cpu_cc_src2);
3033 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3034 break;
3035 #endif
3036 case 0xe: /* udiv */
3037 CHECK_IU_FEATURE(dc, DIV);
3038 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3039 if (xop & 0x10) {
3040 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3041 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3042 dc->cc_op = CC_OP_DIV;
3044 break;
3045 case 0xf: /* sdiv */
3046 CHECK_IU_FEATURE(dc, DIV);
3047 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3048 if (xop & 0x10) {
3049 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3050 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3051 dc->cc_op = CC_OP_DIV;
3053 break;
3054 default:
3055 goto illegal_insn;
3057 gen_movl_TN_reg(rd, cpu_dst);
3058 } else {
3059 cpu_src1 = get_src1(insn, cpu_src1);
3060 cpu_src2 = get_src2(insn, cpu_src2);
3061 switch (xop) {
3062 case 0x20: /* taddcc */
3063 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3064 gen_movl_TN_reg(rd, cpu_dst);
3065 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3066 dc->cc_op = CC_OP_TADD;
3067 break;
3068 case 0x21: /* tsubcc */
3069 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3070 gen_movl_TN_reg(rd, cpu_dst);
3071 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3072 dc->cc_op = CC_OP_TSUB;
3073 break;
3074 case 0x22: /* taddcctv */
3075 save_state(dc, cpu_cond);
3076 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3077 gen_movl_TN_reg(rd, cpu_dst);
3078 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3079 dc->cc_op = CC_OP_TADDTV;
3080 break;
3081 case 0x23: /* tsubcctv */
3082 save_state(dc, cpu_cond);
3083 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3084 gen_movl_TN_reg(rd, cpu_dst);
3085 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3086 dc->cc_op = CC_OP_TSUBTV;
3087 break;
3088 case 0x24: /* mulscc */
3089 gen_helper_compute_psr();
3090 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3091 gen_movl_TN_reg(rd, cpu_dst);
3092 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3093 dc->cc_op = CC_OP_ADD;
3094 break;
3095 #ifndef TARGET_SPARC64
3096 case 0x25: /* sll */
3097 if (IS_IMM) { /* immediate */
3098 simm = GET_FIELDs(insn, 20, 31);
3099 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3100 } else { /* register */
3101 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3102 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3104 gen_movl_TN_reg(rd, cpu_dst);
3105 break;
3106 case 0x26: /* srl */
3107 if (IS_IMM) { /* immediate */
3108 simm = GET_FIELDs(insn, 20, 31);
3109 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3110 } else { /* register */
3111 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3112 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3114 gen_movl_TN_reg(rd, cpu_dst);
3115 break;
3116 case 0x27: /* sra */
3117 if (IS_IMM) { /* immediate */
3118 simm = GET_FIELDs(insn, 20, 31);
3119 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3120 } else { /* register */
3121 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3122 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3124 gen_movl_TN_reg(rd, cpu_dst);
3125 break;
3126 #endif
3127 case 0x30:
3129 switch(rd) {
3130 case 0: /* wry */
3131 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3132 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3133 break;
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3136 SPARCv8 manual, nop
3137 on the microSPARC
3138 II */
3139 case 0x10 ... 0x1f: /* implementation-dependent
3140 in the SPARCv8
3141 manual, nop on the
3142 microSPARC II */
3143 break;
3144 #else
3145 case 0x2: /* V9 wrccr */
3146 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3147 gen_helper_wrccr(cpu_dst);
3148 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3149 dc->cc_op = CC_OP_FLAGS;
3150 break;
3151 case 0x3: /* V9 wrasi */
3152 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3153 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3154 break;
3155 case 0x6: /* V9 wrfprs */
3156 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3157 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3158 save_state(dc, cpu_cond);
3159 gen_op_next_insn();
3160 tcg_gen_exit_tb(0);
3161 dc->is_br = 1;
3162 break;
3163 case 0xf: /* V9 sir, nop if user */
3164 #if !defined(CONFIG_USER_ONLY)
3165 if (supervisor(dc))
3166 ; // XXX
3167 #endif
3168 break;
3169 case 0x13: /* Graphics Status */
3170 if (gen_trap_ifnofpu(dc, cpu_cond))
3171 goto jmp_insn;
3172 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3173 break;
3174 case 0x14: /* Softint set */
3175 if (!supervisor(dc))
3176 goto illegal_insn;
3177 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3178 gen_helper_set_softint(cpu_tmp64);
3179 break;
3180 case 0x15: /* Softint clear */
3181 if (!supervisor(dc))
3182 goto illegal_insn;
3183 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3184 gen_helper_clear_softint(cpu_tmp64);
3185 break;
3186 case 0x16: /* Softint write */
3187 if (!supervisor(dc))
3188 goto illegal_insn;
3189 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3190 gen_helper_write_softint(cpu_tmp64);
3191 break;
3192 case 0x17: /* Tick compare */
3193 #if !defined(CONFIG_USER_ONLY)
3194 if (!supervisor(dc))
3195 goto illegal_insn;
3196 #endif
3198 TCGv_ptr r_tickptr;
3200 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3201 cpu_src2);
3202 r_tickptr = tcg_temp_new_ptr();
3203 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3204 offsetof(CPUState, tick));
3205 gen_helper_tick_set_limit(r_tickptr,
3206 cpu_tick_cmpr);
3207 tcg_temp_free_ptr(r_tickptr);
3209 break;
3210 case 0x18: /* System tick */
3211 #if !defined(CONFIG_USER_ONLY)
3212 if (!supervisor(dc))
3213 goto illegal_insn;
3214 #endif
3216 TCGv_ptr r_tickptr;
3218 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3219 cpu_src2);
3220 r_tickptr = tcg_temp_new_ptr();
3221 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3222 offsetof(CPUState, stick));
3223 gen_helper_tick_set_count(r_tickptr,
3224 cpu_dst);
3225 tcg_temp_free_ptr(r_tickptr);
3227 break;
3228 case 0x19: /* System tick compare */
3229 #if !defined(CONFIG_USER_ONLY)
3230 if (!supervisor(dc))
3231 goto illegal_insn;
3232 #endif
3234 TCGv_ptr r_tickptr;
3236 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3237 cpu_src2);
3238 r_tickptr = tcg_temp_new_ptr();
3239 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3240 offsetof(CPUState, stick));
3241 gen_helper_tick_set_limit(r_tickptr,
3242 cpu_stick_cmpr);
3243 tcg_temp_free_ptr(r_tickptr);
3245 break;
3247 case 0x10: /* Performance Control */
3248 case 0x11: /* Performance Instrumentation
3249 Counter */
3250 case 0x12: /* Dispatch Control */
3251 #endif
3252 default:
3253 goto illegal_insn;
3256 break;
3257 #if !defined(CONFIG_USER_ONLY)
3258 case 0x31: /* wrpsr, V9 saved, restored */
3260 if (!supervisor(dc))
3261 goto priv_insn;
3262 #ifdef TARGET_SPARC64
3263 switch (rd) {
3264 case 0:
3265 gen_helper_saved();
3266 break;
3267 case 1:
3268 gen_helper_restored();
3269 break;
3270 case 2: /* UA2005 allclean */
3271 case 3: /* UA2005 otherw */
3272 case 4: /* UA2005 normalw */
3273 case 5: /* UA2005 invalw */
3274 // XXX
3275 default:
3276 goto illegal_insn;
3278 #else
3279 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3280 gen_helper_wrpsr(cpu_dst);
3281 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3282 dc->cc_op = CC_OP_FLAGS;
3283 save_state(dc, cpu_cond);
3284 gen_op_next_insn();
3285 tcg_gen_exit_tb(0);
3286 dc->is_br = 1;
3287 #endif
3289 break;
3290 case 0x32: /* wrwim, V9 wrpr */
3292 if (!supervisor(dc))
3293 goto priv_insn;
3294 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3295 #ifdef TARGET_SPARC64
3296 switch (rd) {
3297 case 0: // tpc
3299 TCGv_ptr r_tsptr;
3301 r_tsptr = tcg_temp_new_ptr();
3302 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3303 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3304 offsetof(trap_state, tpc));
3305 tcg_temp_free_ptr(r_tsptr);
3307 break;
3308 case 1: // tnpc
3310 TCGv_ptr r_tsptr;
3312 r_tsptr = tcg_temp_new_ptr();
3313 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3314 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3315 offsetof(trap_state, tnpc));
3316 tcg_temp_free_ptr(r_tsptr);
3318 break;
3319 case 2: // tstate
3321 TCGv_ptr r_tsptr;
3323 r_tsptr = tcg_temp_new_ptr();
3324 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3325 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3326 offsetof(trap_state,
3327 tstate));
3328 tcg_temp_free_ptr(r_tsptr);
3330 break;
3331 case 3: // tt
3333 TCGv_ptr r_tsptr;
3335 r_tsptr = tcg_temp_new_ptr();
3336 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3337 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3338 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3339 offsetof(trap_state, tt));
3340 tcg_temp_free_ptr(r_tsptr);
3342 break;
3343 case 4: // tick
3345 TCGv_ptr r_tickptr;
3347 r_tickptr = tcg_temp_new_ptr();
3348 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3349 offsetof(CPUState, tick));
3350 gen_helper_tick_set_count(r_tickptr,
3351 cpu_tmp0);
3352 tcg_temp_free_ptr(r_tickptr);
3354 break;
3355 case 5: // tba
3356 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3357 break;
3358 case 6: // pstate
3359 save_state(dc, cpu_cond);
3360 gen_helper_wrpstate(cpu_tmp0);
3361 gen_op_next_insn();
3362 tcg_gen_exit_tb(0);
3363 dc->is_br = 1;
3364 break;
3365 case 7: // tl
3366 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3367 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3368 offsetof(CPUSPARCState, tl));
3369 break;
3370 case 8: // pil
3371 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3372 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3373 offsetof(CPUSPARCState,
3374 psrpil));
3375 break;
3376 case 9: // cwp
3377 gen_helper_wrcwp(cpu_tmp0);
3378 break;
3379 case 10: // cansave
3380 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3381 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3382 offsetof(CPUSPARCState,
3383 cansave));
3384 break;
3385 case 11: // canrestore
3386 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3387 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3388 offsetof(CPUSPARCState,
3389 canrestore));
3390 break;
3391 case 12: // cleanwin
3392 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3393 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3394 offsetof(CPUSPARCState,
3395 cleanwin));
3396 break;
3397 case 13: // otherwin
3398 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3399 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3400 offsetof(CPUSPARCState,
3401 otherwin));
3402 break;
3403 case 14: // wstate
3404 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3405 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3406 offsetof(CPUSPARCState,
3407 wstate));
3408 break;
3409 case 16: // UA2005 gl
3410 CHECK_IU_FEATURE(dc, GL);
3411 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3412 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3413 offsetof(CPUSPARCState, gl));
3414 break;
3415 case 26: // UA2005 strand status
3416 CHECK_IU_FEATURE(dc, HYPV);
3417 if (!hypervisor(dc))
3418 goto priv_insn;
3419 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3420 break;
3421 default:
3422 goto illegal_insn;
3424 #else
3425 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3426 if (dc->def->nwindows != 32)
3427 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3428 (1 << dc->def->nwindows) - 1);
3429 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3430 #endif
3432 break;
3433 case 0x33: /* wrtbr, UA2005 wrhpr */
3435 #ifndef TARGET_SPARC64
3436 if (!supervisor(dc))
3437 goto priv_insn;
3438 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3439 #else
3440 CHECK_IU_FEATURE(dc, HYPV);
3441 if (!hypervisor(dc))
3442 goto priv_insn;
3443 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3444 switch (rd) {
3445 case 0: // hpstate
3446 // XXX gen_op_wrhpstate();
3447 save_state(dc, cpu_cond);
3448 gen_op_next_insn();
3449 tcg_gen_exit_tb(0);
3450 dc->is_br = 1;
3451 break;
3452 case 1: // htstate
3453 // XXX gen_op_wrhtstate();
3454 break;
3455 case 3: // hintp
3456 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3457 break;
3458 case 5: // htba
3459 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3460 break;
3461 case 31: // hstick_cmpr
3463 TCGv_ptr r_tickptr;
3465 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3466 r_tickptr = tcg_temp_new_ptr();
3467 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3468 offsetof(CPUState, hstick));
3469 gen_helper_tick_set_limit(r_tickptr,
3470 cpu_hstick_cmpr);
3471 tcg_temp_free_ptr(r_tickptr);
3473 break;
3474 case 6: // hver readonly
3475 default:
3476 goto illegal_insn;
3478 #endif
3480 break;
3481 #endif
3482 #ifdef TARGET_SPARC64
3483 case 0x2c: /* V9 movcc */
3485 int cc = GET_FIELD_SP(insn, 11, 12);
3486 int cond = GET_FIELD_SP(insn, 14, 17);
3487 TCGv r_cond;
3488 int l1;
3490 r_cond = tcg_temp_new();
3491 if (insn & (1 << 18)) {
3492 if (cc == 0)
3493 gen_cond(r_cond, 0, cond, dc);
3494 else if (cc == 2)
3495 gen_cond(r_cond, 1, cond, dc);
3496 else
3497 goto illegal_insn;
3498 } else {
3499 gen_fcond(r_cond, cc, cond);
3502 l1 = gen_new_label();
3504 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3505 if (IS_IMM) { /* immediate */
3506 TCGv r_const;
3508 simm = GET_FIELD_SPs(insn, 0, 10);
3509 r_const = tcg_const_tl(simm);
3510 gen_movl_TN_reg(rd, r_const);
3511 tcg_temp_free(r_const);
3512 } else {
3513 rs2 = GET_FIELD_SP(insn, 0, 4);
3514 gen_movl_reg_TN(rs2, cpu_tmp0);
3515 gen_movl_TN_reg(rd, cpu_tmp0);
3517 gen_set_label(l1);
3518 tcg_temp_free(r_cond);
3519 break;
3521 case 0x2d: /* V9 sdivx */
3522 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3523 gen_movl_TN_reg(rd, cpu_dst);
3524 break;
3525 case 0x2e: /* V9 popc */
3527 cpu_src2 = get_src2(insn, cpu_src2);
3528 gen_helper_popc(cpu_dst, cpu_src2);
3529 gen_movl_TN_reg(rd, cpu_dst);
3531 case 0x2f: /* V9 movr */
3533 int cond = GET_FIELD_SP(insn, 10, 12);
3534 int l1;
3536 cpu_src1 = get_src1(insn, cpu_src1);
3538 l1 = gen_new_label();
3540 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3541 cpu_src1, 0, l1);
3542 if (IS_IMM) { /* immediate */
3543 TCGv r_const;
3545 simm = GET_FIELD_SPs(insn, 0, 9);
3546 r_const = tcg_const_tl(simm);
3547 gen_movl_TN_reg(rd, r_const);
3548 tcg_temp_free(r_const);
3549 } else {
3550 rs2 = GET_FIELD_SP(insn, 0, 4);
3551 gen_movl_reg_TN(rs2, cpu_tmp0);
3552 gen_movl_TN_reg(rd, cpu_tmp0);
3554 gen_set_label(l1);
3555 break;
3557 #endif
3558 default:
3559 goto illegal_insn;
3562 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3563 #ifdef TARGET_SPARC64
3564 int opf = GET_FIELD_SP(insn, 5, 13);
3565 rs1 = GET_FIELD(insn, 13, 17);
3566 rs2 = GET_FIELD(insn, 27, 31);
3567 if (gen_trap_ifnofpu(dc, cpu_cond))
3568 goto jmp_insn;
3570 switch (opf) {
3571 case 0x000: /* VIS I edge8cc */
3572 case 0x001: /* VIS II edge8n */
3573 case 0x002: /* VIS I edge8lcc */
3574 case 0x003: /* VIS II edge8ln */
3575 case 0x004: /* VIS I edge16cc */
3576 case 0x005: /* VIS II edge16n */
3577 case 0x006: /* VIS I edge16lcc */
3578 case 0x007: /* VIS II edge16ln */
3579 case 0x008: /* VIS I edge32cc */
3580 case 0x009: /* VIS II edge32n */
3581 case 0x00a: /* VIS I edge32lcc */
3582 case 0x00b: /* VIS II edge32ln */
3583 // XXX
3584 goto illegal_insn;
3585 case 0x010: /* VIS I array8 */
3586 CHECK_FPU_FEATURE(dc, VIS1);
3587 cpu_src1 = get_src1(insn, cpu_src1);
3588 gen_movl_reg_TN(rs2, cpu_src2);
3589 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3590 gen_movl_TN_reg(rd, cpu_dst);
3591 break;
3592 case 0x012: /* VIS I array16 */
3593 CHECK_FPU_FEATURE(dc, VIS1);
3594 cpu_src1 = get_src1(insn, cpu_src1);
3595 gen_movl_reg_TN(rs2, cpu_src2);
3596 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3597 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3598 gen_movl_TN_reg(rd, cpu_dst);
3599 break;
3600 case 0x014: /* VIS I array32 */
3601 CHECK_FPU_FEATURE(dc, VIS1);
3602 cpu_src1 = get_src1(insn, cpu_src1);
3603 gen_movl_reg_TN(rs2, cpu_src2);
3604 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3605 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3606 gen_movl_TN_reg(rd, cpu_dst);
3607 break;
3608 case 0x018: /* VIS I alignaddr */
3609 CHECK_FPU_FEATURE(dc, VIS1);
3610 cpu_src1 = get_src1(insn, cpu_src1);
3611 gen_movl_reg_TN(rs2, cpu_src2);
3612 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3613 gen_movl_TN_reg(rd, cpu_dst);
3614 break;
3615 case 0x019: /* VIS II bmask */
3616 case 0x01a: /* VIS I alignaddrl */
3617 // XXX
3618 goto illegal_insn;
3619 case 0x020: /* VIS I fcmple16 */
3620 CHECK_FPU_FEATURE(dc, VIS1);
3621 gen_op_load_fpr_DT0(DFPREG(rs1));
3622 gen_op_load_fpr_DT1(DFPREG(rs2));
3623 gen_helper_fcmple16();
3624 gen_op_store_DT0_fpr(DFPREG(rd));
3625 break;
3626 case 0x022: /* VIS I fcmpne16 */
3627 CHECK_FPU_FEATURE(dc, VIS1);
3628 gen_op_load_fpr_DT0(DFPREG(rs1));
3629 gen_op_load_fpr_DT1(DFPREG(rs2));
3630 gen_helper_fcmpne16();
3631 gen_op_store_DT0_fpr(DFPREG(rd));
3632 break;
3633 case 0x024: /* VIS I fcmple32 */
3634 CHECK_FPU_FEATURE(dc, VIS1);
3635 gen_op_load_fpr_DT0(DFPREG(rs1));
3636 gen_op_load_fpr_DT1(DFPREG(rs2));
3637 gen_helper_fcmple32();
3638 gen_op_store_DT0_fpr(DFPREG(rd));
3639 break;
3640 case 0x026: /* VIS I fcmpne32 */
3641 CHECK_FPU_FEATURE(dc, VIS1);
3642 gen_op_load_fpr_DT0(DFPREG(rs1));
3643 gen_op_load_fpr_DT1(DFPREG(rs2));
3644 gen_helper_fcmpne32();
3645 gen_op_store_DT0_fpr(DFPREG(rd));
3646 break;
3647 case 0x028: /* VIS I fcmpgt16 */
3648 CHECK_FPU_FEATURE(dc, VIS1);
3649 gen_op_load_fpr_DT0(DFPREG(rs1));
3650 gen_op_load_fpr_DT1(DFPREG(rs2));
3651 gen_helper_fcmpgt16();
3652 gen_op_store_DT0_fpr(DFPREG(rd));
3653 break;
3654 case 0x02a: /* VIS I fcmpeq16 */
3655 CHECK_FPU_FEATURE(dc, VIS1);
3656 gen_op_load_fpr_DT0(DFPREG(rs1));
3657 gen_op_load_fpr_DT1(DFPREG(rs2));
3658 gen_helper_fcmpeq16();
3659 gen_op_store_DT0_fpr(DFPREG(rd));
3660 break;
3661 case 0x02c: /* VIS I fcmpgt32 */
3662 CHECK_FPU_FEATURE(dc, VIS1);
3663 gen_op_load_fpr_DT0(DFPREG(rs1));
3664 gen_op_load_fpr_DT1(DFPREG(rs2));
3665 gen_helper_fcmpgt32();
3666 gen_op_store_DT0_fpr(DFPREG(rd));
3667 break;
3668 case 0x02e: /* VIS I fcmpeq32 */
3669 CHECK_FPU_FEATURE(dc, VIS1);
3670 gen_op_load_fpr_DT0(DFPREG(rs1));
3671 gen_op_load_fpr_DT1(DFPREG(rs2));
3672 gen_helper_fcmpeq32();
3673 gen_op_store_DT0_fpr(DFPREG(rd));
3674 break;
3675 case 0x031: /* VIS I fmul8x16 */
3676 CHECK_FPU_FEATURE(dc, VIS1);
3677 gen_op_load_fpr_DT0(DFPREG(rs1));
3678 gen_op_load_fpr_DT1(DFPREG(rs2));
3679 gen_helper_fmul8x16();
3680 gen_op_store_DT0_fpr(DFPREG(rd));
3681 break;
3682 case 0x033: /* VIS I fmul8x16au */
3683 CHECK_FPU_FEATURE(dc, VIS1);
3684 gen_op_load_fpr_DT0(DFPREG(rs1));
3685 gen_op_load_fpr_DT1(DFPREG(rs2));
3686 gen_helper_fmul8x16au();
3687 gen_op_store_DT0_fpr(DFPREG(rd));
3688 break;
3689 case 0x035: /* VIS I fmul8x16al */
3690 CHECK_FPU_FEATURE(dc, VIS1);
3691 gen_op_load_fpr_DT0(DFPREG(rs1));
3692 gen_op_load_fpr_DT1(DFPREG(rs2));
3693 gen_helper_fmul8x16al();
3694 gen_op_store_DT0_fpr(DFPREG(rd));
3695 break;
3696 case 0x036: /* VIS I fmul8sux16 */
3697 CHECK_FPU_FEATURE(dc, VIS1);
3698 gen_op_load_fpr_DT0(DFPREG(rs1));
3699 gen_op_load_fpr_DT1(DFPREG(rs2));
3700 gen_helper_fmul8sux16();
3701 gen_op_store_DT0_fpr(DFPREG(rd));
3702 break;
3703 case 0x037: /* VIS I fmul8ulx16 */
3704 CHECK_FPU_FEATURE(dc, VIS1);
3705 gen_op_load_fpr_DT0(DFPREG(rs1));
3706 gen_op_load_fpr_DT1(DFPREG(rs2));
3707 gen_helper_fmul8ulx16();
3708 gen_op_store_DT0_fpr(DFPREG(rd));
3709 break;
3710 case 0x038: /* VIS I fmuld8sux16 */
3711 CHECK_FPU_FEATURE(dc, VIS1);
3712 gen_op_load_fpr_DT0(DFPREG(rs1));
3713 gen_op_load_fpr_DT1(DFPREG(rs2));
3714 gen_helper_fmuld8sux16();
3715 gen_op_store_DT0_fpr(DFPREG(rd));
3716 break;
3717 case 0x039: /* VIS I fmuld8ulx16 */
3718 CHECK_FPU_FEATURE(dc, VIS1);
3719 gen_op_load_fpr_DT0(DFPREG(rs1));
3720 gen_op_load_fpr_DT1(DFPREG(rs2));
3721 gen_helper_fmuld8ulx16();
3722 gen_op_store_DT0_fpr(DFPREG(rd));
3723 break;
3724 case 0x03a: /* VIS I fpack32 */
3725 case 0x03b: /* VIS I fpack16 */
3726 case 0x03d: /* VIS I fpackfix */
3727 case 0x03e: /* VIS I pdist */
3728 // XXX
3729 goto illegal_insn;
3730 case 0x048: /* VIS I faligndata */
3731 CHECK_FPU_FEATURE(dc, VIS1);
3732 gen_op_load_fpr_DT0(DFPREG(rs1));
3733 gen_op_load_fpr_DT1(DFPREG(rs2));
3734 gen_helper_faligndata();
3735 gen_op_store_DT0_fpr(DFPREG(rd));
3736 break;
3737 case 0x04b: /* VIS I fpmerge */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 gen_helper_fpmerge();
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3743 break;
3744 case 0x04c: /* VIS II bshuffle */
3745 // XXX
3746 goto illegal_insn;
3747 case 0x04d: /* VIS I fexpand */
3748 CHECK_FPU_FEATURE(dc, VIS1);
3749 gen_op_load_fpr_DT0(DFPREG(rs1));
3750 gen_op_load_fpr_DT1(DFPREG(rs2));
3751 gen_helper_fexpand();
3752 gen_op_store_DT0_fpr(DFPREG(rd));
3753 break;
3754 case 0x050: /* VIS I fpadd16 */
3755 CHECK_FPU_FEATURE(dc, VIS1);
3756 gen_op_load_fpr_DT0(DFPREG(rs1));
3757 gen_op_load_fpr_DT1(DFPREG(rs2));
3758 gen_helper_fpadd16();
3759 gen_op_store_DT0_fpr(DFPREG(rd));
3760 break;
3761 case 0x051: /* VIS I fpadd16s */
3762 CHECK_FPU_FEATURE(dc, VIS1);
3763 gen_helper_fpadd16s(cpu_fpr[rd],
3764 cpu_fpr[rs1], cpu_fpr[rs2]);
3765 break;
3766 case 0x052: /* VIS I fpadd32 */
3767 CHECK_FPU_FEATURE(dc, VIS1);
3768 gen_op_load_fpr_DT0(DFPREG(rs1));
3769 gen_op_load_fpr_DT1(DFPREG(rs2));
3770 gen_helper_fpadd32();
3771 gen_op_store_DT0_fpr(DFPREG(rd));
3772 break;
3773 case 0x053: /* VIS I fpadd32s */
3774 CHECK_FPU_FEATURE(dc, VIS1);
3775 gen_helper_fpadd32s(cpu_fpr[rd],
3776 cpu_fpr[rs1], cpu_fpr[rs2]);
3777 break;
3778 case 0x054: /* VIS I fpsub16 */
3779 CHECK_FPU_FEATURE(dc, VIS1);
3780 gen_op_load_fpr_DT0(DFPREG(rs1));
3781 gen_op_load_fpr_DT1(DFPREG(rs2));
3782 gen_helper_fpsub16();
3783 gen_op_store_DT0_fpr(DFPREG(rd));
3784 break;
3785 case 0x055: /* VIS I fpsub16s */
3786 CHECK_FPU_FEATURE(dc, VIS1);
3787 gen_helper_fpsub16s(cpu_fpr[rd],
3788 cpu_fpr[rs1], cpu_fpr[rs2]);
3789 break;
3790 case 0x056: /* VIS I fpsub32 */
3791 CHECK_FPU_FEATURE(dc, VIS1);
3792 gen_op_load_fpr_DT0(DFPREG(rs1));
3793 gen_op_load_fpr_DT1(DFPREG(rs2));
3794 gen_helper_fpsub32();
3795 gen_op_store_DT0_fpr(DFPREG(rd));
3796 break;
3797 case 0x057: /* VIS I fpsub32s */
3798 CHECK_FPU_FEATURE(dc, VIS1);
3799 gen_helper_fpsub32s(cpu_fpr[rd],
3800 cpu_fpr[rs1], cpu_fpr[rs2]);
3801 break;
3802 case 0x060: /* VIS I fzero */
3803 CHECK_FPU_FEATURE(dc, VIS1);
3804 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3805 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3806 break;
3807 case 0x061: /* VIS I fzeros */
3808 CHECK_FPU_FEATURE(dc, VIS1);
3809 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3810 break;
3811 case 0x062: /* VIS I fnor */
3812 CHECK_FPU_FEATURE(dc, VIS1);
3813 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3814 cpu_fpr[DFPREG(rs2)]);
3815 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3816 cpu_fpr[DFPREG(rs2) + 1]);
3817 break;
3818 case 0x063: /* VIS I fnors */
3819 CHECK_FPU_FEATURE(dc, VIS1);
3820 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3821 break;
3822 case 0x064: /* VIS I fandnot2 */
3823 CHECK_FPU_FEATURE(dc, VIS1);
3824 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3825 cpu_fpr[DFPREG(rs2)]);
3826 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3827 cpu_fpr[DFPREG(rs1) + 1],
3828 cpu_fpr[DFPREG(rs2) + 1]);
3829 break;
3830 case 0x065: /* VIS I fandnot2s */
3831 CHECK_FPU_FEATURE(dc, VIS1);
3832 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3833 break;
3834 case 0x066: /* VIS I fnot2 */
3835 CHECK_FPU_FEATURE(dc, VIS1);
3836 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3837 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3838 cpu_fpr[DFPREG(rs2) + 1]);
3839 break;
3840 case 0x067: /* VIS I fnot2s */
3841 CHECK_FPU_FEATURE(dc, VIS1);
3842 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3843 break;
3844 case 0x068: /* VIS I fandnot1 */
3845 CHECK_FPU_FEATURE(dc, VIS1);
3846 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3847 cpu_fpr[DFPREG(rs1)]);
3848 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3849 cpu_fpr[DFPREG(rs2) + 1],
3850 cpu_fpr[DFPREG(rs1) + 1]);
3851 break;
3852 case 0x069: /* VIS I fandnot1s */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3855 break;
3856 case 0x06a: /* VIS I fnot1 */
3857 CHECK_FPU_FEATURE(dc, VIS1);
3858 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3859 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3860 cpu_fpr[DFPREG(rs1) + 1]);
3861 break;
3862 case 0x06b: /* VIS I fnot1s */
3863 CHECK_FPU_FEATURE(dc, VIS1);
3864 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3865 break;
3866 case 0x06c: /* VIS I fxor */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3869 cpu_fpr[DFPREG(rs2)]);
3870 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3871 cpu_fpr[DFPREG(rs1) + 1],
3872 cpu_fpr[DFPREG(rs2) + 1]);
3873 break;
3874 case 0x06d: /* VIS I fxors */
3875 CHECK_FPU_FEATURE(dc, VIS1);
3876 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3877 break;
3878 case 0x06e: /* VIS I fnand */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3881 cpu_fpr[DFPREG(rs2)]);
3882 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3883 cpu_fpr[DFPREG(rs2) + 1]);
3884 break;
3885 case 0x06f: /* VIS I fnands */
3886 CHECK_FPU_FEATURE(dc, VIS1);
3887 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3888 break;
3889 case 0x070: /* VIS I fand */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3892 cpu_fpr[DFPREG(rs2)]);
3893 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3894 cpu_fpr[DFPREG(rs1) + 1],
3895 cpu_fpr[DFPREG(rs2) + 1]);
3896 break;
3897 case 0x071: /* VIS I fands */
3898 CHECK_FPU_FEATURE(dc, VIS1);
3899 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3900 break;
3901 case 0x072: /* VIS I fxnor */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3904 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3905 cpu_fpr[DFPREG(rs1)]);
3906 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3907 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3908 cpu_fpr[DFPREG(rs1) + 1]);
3909 break;
3910 case 0x073: /* VIS I fxnors */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3913 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3914 break;
3915 case 0x074: /* VIS I fsrc1 */
3916 CHECK_FPU_FEATURE(dc, VIS1);
3917 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3918 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3919 cpu_fpr[DFPREG(rs1) + 1]);
3920 break;
3921 case 0x075: /* VIS I fsrc1s */
3922 CHECK_FPU_FEATURE(dc, VIS1);
3923 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3924 break;
3925 case 0x076: /* VIS I fornot2 */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3928 cpu_fpr[DFPREG(rs2)]);
3929 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3930 cpu_fpr[DFPREG(rs1) + 1],
3931 cpu_fpr[DFPREG(rs2) + 1]);
3932 break;
3933 case 0x077: /* VIS I fornot2s */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3936 break;
3937 case 0x078: /* VIS I fsrc2 */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 gen_op_load_fpr_DT0(DFPREG(rs2));
3940 gen_op_store_DT0_fpr(DFPREG(rd));
3941 break;
3942 case 0x079: /* VIS I fsrc2s */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3945 break;
3946 case 0x07a: /* VIS I fornot1 */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3949 cpu_fpr[DFPREG(rs1)]);
3950 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3951 cpu_fpr[DFPREG(rs2) + 1],
3952 cpu_fpr[DFPREG(rs1) + 1]);
3953 break;
3954 case 0x07b: /* VIS I fornot1s */
3955 CHECK_FPU_FEATURE(dc, VIS1);
3956 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3957 break;
3958 case 0x07c: /* VIS I for */
3959 CHECK_FPU_FEATURE(dc, VIS1);
3960 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3961 cpu_fpr[DFPREG(rs2)]);
3962 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3963 cpu_fpr[DFPREG(rs1) + 1],
3964 cpu_fpr[DFPREG(rs2) + 1]);
3965 break;
3966 case 0x07d: /* VIS I fors */
3967 CHECK_FPU_FEATURE(dc, VIS1);
3968 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3969 break;
3970 case 0x07e: /* VIS I fone */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3973 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3974 break;
3975 case 0x07f: /* VIS I fones */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3978 break;
3979 case 0x080: /* VIS I shutdown */
3980 case 0x081: /* VIS II siam */
3981 // XXX
3982 goto illegal_insn;
3983 default:
3984 goto illegal_insn;
3986 #else
3987 goto ncp_insn;
3988 #endif
3989 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3990 #ifdef TARGET_SPARC64
3991 goto illegal_insn;
3992 #else
3993 goto ncp_insn;
3994 #endif
3995 #ifdef TARGET_SPARC64
3996 } else if (xop == 0x39) { /* V9 return */
3997 TCGv_i32 r_const;
3999 save_state(dc, cpu_cond);
4000 cpu_src1 = get_src1(insn, cpu_src1);
4001 if (IS_IMM) { /* immediate */
4002 simm = GET_FIELDs(insn, 19, 31);
4003 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4004 } else { /* register */
4005 rs2 = GET_FIELD(insn, 27, 31);
4006 if (rs2) {
4007 gen_movl_reg_TN(rs2, cpu_src2);
4008 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4009 } else
4010 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4012 gen_helper_restore();
4013 gen_mov_pc_npc(dc, cpu_cond);
4014 r_const = tcg_const_i32(3);
4015 gen_helper_check_align(cpu_dst, r_const);
4016 tcg_temp_free_i32(r_const);
4017 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4018 dc->npc = DYNAMIC_PC;
4019 goto jmp_insn;
4020 #endif
4021 } else {
4022 cpu_src1 = get_src1(insn, cpu_src1);
4023 if (IS_IMM) { /* immediate */
4024 simm = GET_FIELDs(insn, 19, 31);
4025 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4026 } else { /* register */
4027 rs2 = GET_FIELD(insn, 27, 31);
4028 if (rs2) {
4029 gen_movl_reg_TN(rs2, cpu_src2);
4030 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4031 } else
4032 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4034 switch (xop) {
4035 case 0x38: /* jmpl */
4037 TCGv r_pc;
4038 TCGv_i32 r_const;
4040 r_pc = tcg_const_tl(dc->pc);
4041 gen_movl_TN_reg(rd, r_pc);
4042 tcg_temp_free(r_pc);
4043 gen_mov_pc_npc(dc, cpu_cond);
4044 r_const = tcg_const_i32(3);
4045 gen_helper_check_align(cpu_dst, r_const);
4046 tcg_temp_free_i32(r_const);
4047 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4048 dc->npc = DYNAMIC_PC;
4050 goto jmp_insn;
4051 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4052 case 0x39: /* rett, V9 return */
4054 TCGv_i32 r_const;
4056 if (!supervisor(dc))
4057 goto priv_insn;
4058 gen_mov_pc_npc(dc, cpu_cond);
4059 r_const = tcg_const_i32(3);
4060 gen_helper_check_align(cpu_dst, r_const);
4061 tcg_temp_free_i32(r_const);
4062 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4063 dc->npc = DYNAMIC_PC;
4064 gen_helper_rett();
4066 goto jmp_insn;
4067 #endif
4068 case 0x3b: /* flush */
4069 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4070 goto unimp_flush;
4071 gen_helper_flush(cpu_dst);
4072 break;
4073 case 0x3c: /* save */
4074 save_state(dc, cpu_cond);
4075 gen_helper_save();
4076 gen_movl_TN_reg(rd, cpu_dst);
4077 break;
4078 case 0x3d: /* restore */
4079 save_state(dc, cpu_cond);
4080 gen_helper_restore();
4081 gen_movl_TN_reg(rd, cpu_dst);
4082 break;
4083 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4084 case 0x3e: /* V9 done/retry */
4086 switch (rd) {
4087 case 0:
4088 if (!supervisor(dc))
4089 goto priv_insn;
4090 dc->npc = DYNAMIC_PC;
4091 dc->pc = DYNAMIC_PC;
4092 gen_helper_done();
4093 goto jmp_insn;
4094 case 1:
4095 if (!supervisor(dc))
4096 goto priv_insn;
4097 dc->npc = DYNAMIC_PC;
4098 dc->pc = DYNAMIC_PC;
4099 gen_helper_retry();
4100 goto jmp_insn;
4101 default:
4102 goto illegal_insn;
4105 break;
4106 #endif
4107 default:
4108 goto illegal_insn;
4111 break;
4113 break;
4114 case 3: /* load/store instructions */
4116 unsigned int xop = GET_FIELD(insn, 7, 12);
4118 /* flush pending conditional evaluations before exposing
4119 cpu state */
4120 if (dc->cc_op != CC_OP_FLAGS) {
4121 dc->cc_op = CC_OP_FLAGS;
4122 gen_helper_compute_psr();
4124 cpu_src1 = get_src1(insn, cpu_src1);
4125 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4126 rs2 = GET_FIELD(insn, 27, 31);
4127 gen_movl_reg_TN(rs2, cpu_src2);
4128 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4129 } else if (IS_IMM) { /* immediate */
4130 simm = GET_FIELDs(insn, 19, 31);
4131 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4132 } else { /* register */
4133 rs2 = GET_FIELD(insn, 27, 31);
4134 if (rs2 != 0) {
4135 gen_movl_reg_TN(rs2, cpu_src2);
4136 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4137 } else
4138 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4140 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4141 (xop > 0x17 && xop <= 0x1d ) ||
4142 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4143 switch (xop) {
4144 case 0x0: /* ld, V9 lduw, load unsigned word */
4145 gen_address_mask(dc, cpu_addr);
4146 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4147 break;
4148 case 0x1: /* ldub, load unsigned byte */
4149 gen_address_mask(dc, cpu_addr);
4150 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4151 break;
4152 case 0x2: /* lduh, load unsigned halfword */
4153 gen_address_mask(dc, cpu_addr);
4154 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4155 break;
4156 case 0x3: /* ldd, load double word */
4157 if (rd & 1)
4158 goto illegal_insn;
4159 else {
4160 TCGv_i32 r_const;
4162 save_state(dc, cpu_cond);
4163 r_const = tcg_const_i32(7);
4164 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4165 tcg_temp_free_i32(r_const);
4166 gen_address_mask(dc, cpu_addr);
4167 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4168 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4169 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4170 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4171 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4172 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4173 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4175 break;
4176 case 0x9: /* ldsb, load signed byte */
4177 gen_address_mask(dc, cpu_addr);
4178 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4179 break;
4180 case 0xa: /* ldsh, load signed halfword */
4181 gen_address_mask(dc, cpu_addr);
4182 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4183 break;
4184 case 0xd: /* ldstub -- XXX: should be atomically */
4186 TCGv r_const;
4188 gen_address_mask(dc, cpu_addr);
4189 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4190 r_const = tcg_const_tl(0xff);
4191 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4192 tcg_temp_free(r_const);
4194 break;
4195 case 0x0f: /* swap, swap register with memory. Also
4196 atomically */
4197 CHECK_IU_FEATURE(dc, SWAP);
4198 gen_movl_reg_TN(rd, cpu_val);
4199 gen_address_mask(dc, cpu_addr);
4200 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4201 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4202 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4203 break;
4204 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4205 case 0x10: /* lda, V9 lduwa, load word alternate */
4206 #ifndef TARGET_SPARC64
4207 if (IS_IMM)
4208 goto illegal_insn;
4209 if (!supervisor(dc))
4210 goto priv_insn;
4211 #endif
4212 save_state(dc, cpu_cond);
4213 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4214 break;
4215 case 0x11: /* lduba, load unsigned byte alternate */
4216 #ifndef TARGET_SPARC64
4217 if (IS_IMM)
4218 goto illegal_insn;
4219 if (!supervisor(dc))
4220 goto priv_insn;
4221 #endif
4222 save_state(dc, cpu_cond);
4223 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4224 break;
4225 case 0x12: /* lduha, load unsigned halfword alternate */
4226 #ifndef TARGET_SPARC64
4227 if (IS_IMM)
4228 goto illegal_insn;
4229 if (!supervisor(dc))
4230 goto priv_insn;
4231 #endif
4232 save_state(dc, cpu_cond);
4233 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4234 break;
4235 case 0x13: /* ldda, load double word alternate */
4236 #ifndef TARGET_SPARC64
4237 if (IS_IMM)
4238 goto illegal_insn;
4239 if (!supervisor(dc))
4240 goto priv_insn;
4241 #endif
4242 if (rd & 1)
4243 goto illegal_insn;
4244 save_state(dc, cpu_cond);
4245 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4246 goto skip_move;
4247 case 0x19: /* ldsba, load signed byte alternate */
4248 #ifndef TARGET_SPARC64
4249 if (IS_IMM)
4250 goto illegal_insn;
4251 if (!supervisor(dc))
4252 goto priv_insn;
4253 #endif
4254 save_state(dc, cpu_cond);
4255 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4256 break;
4257 case 0x1a: /* ldsha, load signed halfword alternate */
4258 #ifndef TARGET_SPARC64
4259 if (IS_IMM)
4260 goto illegal_insn;
4261 if (!supervisor(dc))
4262 goto priv_insn;
4263 #endif
4264 save_state(dc, cpu_cond);
4265 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4266 break;
4267 case 0x1d: /* ldstuba -- XXX: should be atomically */
4268 #ifndef TARGET_SPARC64
4269 if (IS_IMM)
4270 goto illegal_insn;
4271 if (!supervisor(dc))
4272 goto priv_insn;
4273 #endif
4274 save_state(dc, cpu_cond);
4275 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4276 break;
4277 case 0x1f: /* swapa, swap reg with alt. memory. Also
4278 atomically */
4279 CHECK_IU_FEATURE(dc, SWAP);
4280 #ifndef TARGET_SPARC64
4281 if (IS_IMM)
4282 goto illegal_insn;
4283 if (!supervisor(dc))
4284 goto priv_insn;
4285 #endif
4286 save_state(dc, cpu_cond);
4287 gen_movl_reg_TN(rd, cpu_val);
4288 gen_swap_asi(cpu_val, cpu_addr, insn);
4289 break;
4291 #ifndef TARGET_SPARC64
4292 case 0x30: /* ldc */
4293 case 0x31: /* ldcsr */
4294 case 0x33: /* lddc */
4295 goto ncp_insn;
4296 #endif
4297 #endif
4298 #ifdef TARGET_SPARC64
4299 case 0x08: /* V9 ldsw */
4300 gen_address_mask(dc, cpu_addr);
4301 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4302 break;
4303 case 0x0b: /* V9 ldx */
4304 gen_address_mask(dc, cpu_addr);
4305 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4306 break;
4307 case 0x18: /* V9 ldswa */
4308 save_state(dc, cpu_cond);
4309 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4310 break;
4311 case 0x1b: /* V9 ldxa */
4312 save_state(dc, cpu_cond);
4313 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4314 break;
4315 case 0x2d: /* V9 prefetch, no effect */
4316 goto skip_move;
4317 case 0x30: /* V9 ldfa */
4318 save_state(dc, cpu_cond);
4319 gen_ldf_asi(cpu_addr, insn, 4, rd);
4320 goto skip_move;
4321 case 0x33: /* V9 lddfa */
4322 save_state(dc, cpu_cond);
4323 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4324 goto skip_move;
4325 case 0x3d: /* V9 prefetcha, no effect */
4326 goto skip_move;
4327 case 0x32: /* V9 ldqfa */
4328 CHECK_FPU_FEATURE(dc, FLOAT128);
4329 save_state(dc, cpu_cond);
4330 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4331 goto skip_move;
4332 #endif
4333 default:
4334 goto illegal_insn;
4336 gen_movl_TN_reg(rd, cpu_val);
4337 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4338 skip_move: ;
4339 #endif
4340 } else if (xop >= 0x20 && xop < 0x24) {
4341 if (gen_trap_ifnofpu(dc, cpu_cond))
4342 goto jmp_insn;
4343 save_state(dc, cpu_cond);
4344 switch (xop) {
4345 case 0x20: /* ldf, load fpreg */
4346 gen_address_mask(dc, cpu_addr);
4347 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4348 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4349 break;
4350 case 0x21: /* ldfsr, V9 ldxfsr */
4351 #ifdef TARGET_SPARC64
4352 gen_address_mask(dc, cpu_addr);
4353 if (rd == 1) {
4354 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4355 gen_helper_ldxfsr(cpu_tmp64);
4356 } else
4357 #else
4359 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4360 gen_helper_ldfsr(cpu_tmp32);
4362 #endif
4363 break;
4364 case 0x22: /* ldqf, load quad fpreg */
4366 TCGv_i32 r_const;
4368 CHECK_FPU_FEATURE(dc, FLOAT128);
4369 r_const = tcg_const_i32(dc->mem_idx);
4370 gen_helper_ldqf(cpu_addr, r_const);
4371 tcg_temp_free_i32(r_const);
4372 gen_op_store_QT0_fpr(QFPREG(rd));
4374 break;
4375 case 0x23: /* lddf, load double fpreg */
4377 TCGv_i32 r_const;
4379 r_const = tcg_const_i32(dc->mem_idx);
4380 gen_helper_lddf(cpu_addr, r_const);
4381 tcg_temp_free_i32(r_const);
4382 gen_op_store_DT0_fpr(DFPREG(rd));
4384 break;
4385 default:
4386 goto illegal_insn;
4388 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4389 xop == 0xe || xop == 0x1e) {
4390 gen_movl_reg_TN(rd, cpu_val);
4391 switch (xop) {
4392 case 0x4: /* st, store word */
4393 gen_address_mask(dc, cpu_addr);
4394 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4395 break;
4396 case 0x5: /* stb, store byte */
4397 gen_address_mask(dc, cpu_addr);
4398 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4399 break;
4400 case 0x6: /* sth, store halfword */
4401 gen_address_mask(dc, cpu_addr);
4402 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4403 break;
4404 case 0x7: /* std, store double word */
4405 if (rd & 1)
4406 goto illegal_insn;
4407 else {
4408 TCGv_i32 r_const;
4410 save_state(dc, cpu_cond);
4411 gen_address_mask(dc, cpu_addr);
4412 r_const = tcg_const_i32(7);
4413 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4414 tcg_temp_free_i32(r_const);
4415 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4416 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4417 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4419 break;
4420 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4421 case 0x14: /* sta, V9 stwa, store word alternate */
4422 #ifndef TARGET_SPARC64
4423 if (IS_IMM)
4424 goto illegal_insn;
4425 if (!supervisor(dc))
4426 goto priv_insn;
4427 #endif
4428 save_state(dc, cpu_cond);
4429 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4430 break;
4431 case 0x15: /* stba, store byte alternate */
4432 #ifndef TARGET_SPARC64
4433 if (IS_IMM)
4434 goto illegal_insn;
4435 if (!supervisor(dc))
4436 goto priv_insn;
4437 #endif
4438 save_state(dc, cpu_cond);
4439 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4440 break;
4441 case 0x16: /* stha, store halfword alternate */
4442 #ifndef TARGET_SPARC64
4443 if (IS_IMM)
4444 goto illegal_insn;
4445 if (!supervisor(dc))
4446 goto priv_insn;
4447 #endif
4448 save_state(dc, cpu_cond);
4449 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4450 break;
4451 case 0x17: /* stda, store double word alternate */
4452 #ifndef TARGET_SPARC64
4453 if (IS_IMM)
4454 goto illegal_insn;
4455 if (!supervisor(dc))
4456 goto priv_insn;
4457 #endif
4458 if (rd & 1)
4459 goto illegal_insn;
4460 else {
4461 save_state(dc, cpu_cond);
4462 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4464 break;
4465 #endif
4466 #ifdef TARGET_SPARC64
4467 case 0x0e: /* V9 stx */
4468 gen_address_mask(dc, cpu_addr);
4469 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4470 break;
4471 case 0x1e: /* V9 stxa */
4472 save_state(dc, cpu_cond);
4473 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4474 break;
4475 #endif
4476 default:
4477 goto illegal_insn;
4479 } else if (xop > 0x23 && xop < 0x28) {
4480 if (gen_trap_ifnofpu(dc, cpu_cond))
4481 goto jmp_insn;
4482 save_state(dc, cpu_cond);
4483 switch (xop) {
4484 case 0x24: /* stf, store fpreg */
4485 gen_address_mask(dc, cpu_addr);
4486 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4487 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4488 break;
4489 case 0x25: /* stfsr, V9 stxfsr */
4490 #ifdef TARGET_SPARC64
4491 gen_address_mask(dc, cpu_addr);
4492 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4493 if (rd == 1)
4494 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4495 else
4496 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4497 #else
4498 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4499 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4500 #endif
4501 break;
4502 case 0x26:
4503 #ifdef TARGET_SPARC64
4504 /* V9 stqf, store quad fpreg */
4506 TCGv_i32 r_const;
4508 CHECK_FPU_FEATURE(dc, FLOAT128);
4509 gen_op_load_fpr_QT0(QFPREG(rd));
4510 r_const = tcg_const_i32(dc->mem_idx);
4511 gen_helper_stqf(cpu_addr, r_const);
4512 tcg_temp_free_i32(r_const);
4514 break;
4515 #else /* !TARGET_SPARC64 */
4516 /* stdfq, store floating point queue */
4517 #if defined(CONFIG_USER_ONLY)
4518 goto illegal_insn;
4519 #else
4520 if (!supervisor(dc))
4521 goto priv_insn;
4522 if (gen_trap_ifnofpu(dc, cpu_cond))
4523 goto jmp_insn;
4524 goto nfq_insn;
4525 #endif
4526 #endif
4527 case 0x27: /* stdf, store double fpreg */
4529 TCGv_i32 r_const;
4531 gen_op_load_fpr_DT0(DFPREG(rd));
4532 r_const = tcg_const_i32(dc->mem_idx);
4533 gen_helper_stdf(cpu_addr, r_const);
4534 tcg_temp_free_i32(r_const);
4536 break;
4537 default:
4538 goto illegal_insn;
4540 } else if (xop > 0x33 && xop < 0x3f) {
4541 save_state(dc, cpu_cond);
4542 switch (xop) {
4543 #ifdef TARGET_SPARC64
4544 case 0x34: /* V9 stfa */
4545 gen_stf_asi(cpu_addr, insn, 4, rd);
4546 break;
4547 case 0x36: /* V9 stqfa */
4549 TCGv_i32 r_const;
4551 CHECK_FPU_FEATURE(dc, FLOAT128);
4552 r_const = tcg_const_i32(7);
4553 gen_helper_check_align(cpu_addr, r_const);
4554 tcg_temp_free_i32(r_const);
4555 gen_op_load_fpr_QT0(QFPREG(rd));
4556 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4558 break;
4559 case 0x37: /* V9 stdfa */
4560 gen_op_load_fpr_DT0(DFPREG(rd));
4561 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4562 break;
4563 case 0x3c: /* V9 casa */
4564 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4565 gen_movl_TN_reg(rd, cpu_val);
4566 break;
4567 case 0x3e: /* V9 casxa */
4568 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4569 gen_movl_TN_reg(rd, cpu_val);
4570 break;
4571 #else
4572 case 0x34: /* stc */
4573 case 0x35: /* stcsr */
4574 case 0x36: /* stdcq */
4575 case 0x37: /* stdc */
4576 goto ncp_insn;
4577 #endif
4578 default:
4579 goto illegal_insn;
4581 } else
4582 goto illegal_insn;
4584 break;
4586 /* default case for non jump instructions */
4587 if (dc->npc == DYNAMIC_PC) {
4588 dc->pc = DYNAMIC_PC;
4589 gen_op_next_insn();
4590 } else if (dc->npc == JUMP_PC) {
4591 /* we can do a static jump */
4592 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4593 dc->is_br = 1;
4594 } else {
4595 dc->pc = dc->npc;
4596 dc->npc = dc->npc + 4;
4598 jmp_insn:
4599 return;
4600 illegal_insn:
4602 TCGv_i32 r_const;
4604 save_state(dc, cpu_cond);
4605 r_const = tcg_const_i32(TT_ILL_INSN);
4606 gen_helper_raise_exception(r_const);
4607 tcg_temp_free_i32(r_const);
4608 dc->is_br = 1;
4610 return;
4611 unimp_flush:
4613 TCGv_i32 r_const;
4615 save_state(dc, cpu_cond);
4616 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4617 gen_helper_raise_exception(r_const);
4618 tcg_temp_free_i32(r_const);
4619 dc->is_br = 1;
4621 return;
4622 #if !defined(CONFIG_USER_ONLY)
4623 priv_insn:
4625 TCGv_i32 r_const;
4627 save_state(dc, cpu_cond);
4628 r_const = tcg_const_i32(TT_PRIV_INSN);
4629 gen_helper_raise_exception(r_const);
4630 tcg_temp_free_i32(r_const);
4631 dc->is_br = 1;
4633 return;
4634 #endif
4635 nfpu_insn:
4636 save_state(dc, cpu_cond);
4637 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4638 dc->is_br = 1;
4639 return;
4640 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4641 nfq_insn:
4642 save_state(dc, cpu_cond);
4643 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4644 dc->is_br = 1;
4645 return;
4646 #endif
4647 #ifndef TARGET_SPARC64
4648 ncp_insn:
4650 TCGv r_const;
4652 save_state(dc, cpu_cond);
4653 r_const = tcg_const_i32(TT_NCP_INSN);
4654 gen_helper_raise_exception(r_const);
4655 tcg_temp_free(r_const);
4656 dc->is_br = 1;
4658 return;
4659 #endif
/* Translate a block of guest SPARC instructions starting at tb->pc into
   TCG intermediate ops.
   tb:  the TranslationBlock being filled (pc, cs_base and cflags set by caller;
        size/icount are written back here unless spc is set).
   spc: non-zero when invoked from gen_intermediate_code_pc(), i.e. we are
        re-translating only to rebuild the gen_opc_* search tables used to
        recover pc/npc from a host PC.
   NOTE(review): the extraction of this file dropped brace-only lines and
   fused the original line numbers into the text; tokens are kept as-is.  */
4662 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4663                                                   int spc, CPUSPARCState *env)
4665 target_ulong pc_start, last_pc;
4666 uint16_t *gen_opc_end;
4667 DisasContext dc1, *dc = &dc1;
4668 CPUBreakpoint *bp;
4669 int j, lj = -1;
4670 int num_insns;
4671 int max_insns;
/* Reset per-TB disassembly state.  The delay-slot npc is passed in
   through tb->cs_base by the SPARC target. */
4673 memset(dc, 0, sizeof(DisasContext));
4674 dc->tb = tb;
4675 pc_start = tb->pc;
4676 dc->pc = pc_start;
4677 last_pc = dc->pc;
4678 dc->npc = (target_ulong) tb->cs_base;
/* Condition codes start out "not yet computed" for lazy CC evaluation. */
4679 dc->cc_op = CC_OP_DYNAMIC;
4680 dc->mem_idx = cpu_mmu_index(env);
4681 dc->def = env->def;
/* FPU instructions only decode when the CPU model has an FPU and it is
   currently enabled (PSR.EF / similar — see cpu_fpu_enabled). */
4682 if ((dc->def->features & CPU_FEATURE_FLOAT))
4683 dc->fpu_enabled = cpu_fpu_enabled(env);
4684 else
4685 dc->fpu_enabled = 0;
4686 #ifdef TARGET_SPARC64
/* PSTATE.AM forces 32-bit address masking on 64-bit CPUs. */
4687 dc->address_mask_32bit = env->pstate & PS_AM;
4688 #endif
4689 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Scratch TCG temporaries shared by disas_sparc_insn(); the _local
   variants survive across branches inside the generated code. */
4691 cpu_tmp0 = tcg_temp_new();
4692 cpu_tmp32 = tcg_temp_new_i32();
4693 cpu_tmp64 = tcg_temp_new_i64();
4695 cpu_dst = tcg_temp_local_new();
4697 // loads and stores
4698 cpu_val = tcg_temp_local_new();
4699 cpu_addr = tcg_temp_local_new();
/* icount limit: 0 in cflags means "no limit" → use the mask maximum. */
4701 num_insns = 0;
4702 max_insns = tb->cflags & CF_COUNT_MASK;
4703 if (max_insns == 0)
4704 max_insns = CF_COUNT_MASK;
4705 gen_icount_start();
/* Main per-instruction translation loop. */
4706 do {
/* Stop at debugger breakpoints: emit a debug exception and end the TB. */
4707 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4708 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4709 if (bp->pc == dc->pc) {
4710 if (dc->pc != pc_start)
4711 save_state(dc, cpu_cond);
4712 gen_helper_debug();
4713 tcg_gen_exit_tb(0);
4714 dc->is_br = 1;
4715 goto exit_gen_loop;
/* Search-PC mode: record op-index → (pc, npc, icount) so a host PC can
   later be mapped back to guest state (see gen_pc_load). */
4719 if (spc) {
4720 qemu_log("Search PC...\n");
4721 j = gen_opc_ptr - gen_opc_buf;
4722 if (lj < j) {
4723 lj++;
4724 while (lj < j)
4725 gen_opc_instr_start[lj++] = 0;
4726 gen_opc_pc[lj] = dc->pc;
4727 gen_opc_npc[lj] = dc->npc;
4728 gen_opc_instr_start[lj] = 1;
4729 gen_opc_icount[lj] = num_insns;
/* For icount, the last insn of an I/O-terminated TB runs with I/O open. */
4732 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4733 gen_io_start();
4734 last_pc = dc->pc;
4735 disas_sparc_insn(dc);
4736 num_insns++;
/* A branch/trap/exception already ended the TB. */
4738 if (dc->is_br)
4739 break;
4740 /* if the next PC is different, we abort now */
4741 if (dc->pc != (last_pc + 4))
4742 break;
4743 /* if we reach a page boundary, we stop generation so that the
4744 PC of a TT_TFAULT exception is always in the right page */
4745 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4746 break;
4747 /* if single step mode, we generate only one instruction and
4748 generate an exception */
4749 if (env->singlestep_enabled || singlestep) {
4750 tcg_gen_movi_tl(cpu_pc, dc->pc);
4751 tcg_gen_exit_tb(0);
4752 break;
/* Also stop when the op buffer nears capacity or the icount budget or
   the near-page-end safety margin is reached. */
4754 } while ((gen_opc_ptr < gen_opc_end) &&
4755 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4756 num_insns < max_insns);
/* Release the scratch temporaries allocated above (reverse order). */
4758 exit_gen_loop:
4759 tcg_temp_free(cpu_addr);
4760 tcg_temp_free(cpu_val);
4761 tcg_temp_free(cpu_dst);
4762 tcg_temp_free_i64(cpu_tmp64);
4763 tcg_temp_free_i32(cpu_tmp32);
4764 tcg_temp_free(cpu_tmp0);
4765 if (tb->cflags & CF_LAST_IO)
4766 gen_io_end();
/* If translation stopped without a control-flow change, emit the TB exit:
   chain directly when pc/npc are both static, otherwise flush them to the
   CPU state and take a slow exit. */
4767 if (!dc->is_br) {
4768 if (dc->pc != DYNAMIC_PC &&
4769 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4770 /* static PC and NPC: we can use direct chaining */
4771 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4772 } else {
4773 if (dc->pc != DYNAMIC_PC)
4774 tcg_gen_movi_tl(cpu_pc, dc->pc);
4775 save_npc(dc, cpu_cond);
4776 tcg_gen_exit_tb(0);
4779 gen_icount_end(tb, num_insns);
4780 *gen_opc_ptr = INDEX_op_end;
/* Search mode pads the instr_start table and exports the conditional
   jump targets; normal mode records the TB's final size and icount. */
4781 if (spc) {
4782 j = gen_opc_ptr - gen_opc_buf;
4783 lj++;
4784 while (lj <= j)
4785 gen_opc_instr_start[lj++] = 0;
4786 #if 0
4787 log_page_dump();
4788 #endif
4789 gen_opc_jump_pc[0] = dc->jump_pc[0];
4790 gen_opc_jump_pc[1] = dc->jump_pc[1];
4791 } else {
4792 tb->size = last_pc + 4 - pc_start;
4793 tb->icount = num_insns;
/* Optional logging of the guest instructions that were translated. */
4795 #ifdef DEBUG_DISAS
4796 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4797 qemu_log("--------------\n");
4798 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4799 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4800 qemu_log("\n");
4802 #endif
4805 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4807 gen_intermediate_code_internal(tb, 0, env);
4810 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4812 gen_intermediate_code_internal(tb, 1, env);
/* One-time initialization of the SPARC translator: register every TCG
   global variable that mirrors a CPUState field (guarded by the static
   'inited' flag so repeated calls are no-ops), then register the helper
   functions via the GEN_HELPER 2 include pass.
   NOTE(review): the extraction of this file dropped brace-only lines and
   fused the original line numbers into the text; tokens are kept as-is.  */
4815 void gen_intermediate_code_init(CPUSPARCState *env)
4817 unsigned int i;
4818 static int inited;
/* Debug names for the global registers; %g0 is hardwired zero and never
   mapped to a TCG global (loop below starts at 1). */
4819 static const char * const gregnames[8] = {
4820 NULL, // g0 not used
4821 "g1",
4822 "g2",
4823 "g3",
4824 "g4",
4825 "g5",
4826 "g6",
4827 "g7",
/* Debug names for all 64 single-precision FP registers. */
4829 static const char * const fregnames[64] = {
4830 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4831 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4832 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4833 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4834 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4835 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4836 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4837 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4840 /* init various static tables */
4841 if (!inited) {
4842 inited = 1;
/* cpu_env lives in the fixed host register TCG_AREG0; every other
   global is addressed as an offset from it into CPUState. */
4844 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4845 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4846 offsetof(CPUState, regwptr),
4847 "regwptr");
/* SPARC64-only state: extended CCs, ASI, FPRS, GSR, the timer compare
   registers, hypervisor registers and the soft interrupt word. */
4848 #ifdef TARGET_SPARC64
4849 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4850 "xcc");
4851 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4852 "asi");
4853 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4854 "fprs");
4855 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4856 "gsr");
4857 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4858 offsetof(CPUState, tick_cmpr),
4859 "tick_cmpr");
4860 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4861 offsetof(CPUState, stick_cmpr),
4862 "stick_cmpr");
4863 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4864 offsetof(CPUState, hstick_cmpr),
4865 "hstick_cmpr");
4866 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4867 "hintp");
4868 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4869 "htba");
4870 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4871 "hver");
4872 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4873 offsetof(CPUState, ssr), "ssr");
4874 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4875 offsetof(CPUState, version), "ver");
4876 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4877 offsetof(CPUState, softint),
4878 "softint");
/* 32-bit SPARC only: the window invalid mask. */
4879 #else
4880 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4881 "wim");
4882 #endif
/* State common to both 32- and 64-bit targets: lazy condition-code
   sources/destination, PSR, FSR, pc/npc pair and the Y register. */
4883 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4884 "cond");
4885 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4886 "cc_src");
4887 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4888 offsetof(CPUState, cc_src2),
4889 "cc_src2");
4890 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4891 "cc_dst");
4892 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
4893 "cc_op");
4894 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4895 "psr");
4896 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4897 "fsr");
4898 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4899 "pc");
4900 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
4901 "npc");
4902 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
/* The trap base register is only meaningful in system emulation. */
4903 #ifndef CONFIG_USER_ONLY
4904 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
4905 "tbr");
4906 #endif
/* Map %g1..%g7 (skipping the hardwired-zero %g0) and all FP registers. */
4907 for (i = 1; i < 8; i++)
4908 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
4909 offsetof(CPUState, gregs[i]),
4910 gregnames[i]);
4911 for (i = 0; i < TARGET_FPREGS; i++)
4912 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
4913 offsetof(CPUState, fpr[i]),
4914 fregnames[i]);
4916 /* register helpers */
4918 #define GEN_HELPER 2
4919 #include "helper.h"
4923 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4924 unsigned long searched_pc, int pc_pos, void *puc)
4926 target_ulong npc;
4927 env->pc = gen_opc_pc[pc_pos];
4928 npc = gen_opc_npc[pc_pos];
4929 if (npc == 1) {
4930 /* dynamic NPC: already stored */
4931 } else if (npc == 2) {
4932 target_ulong t2 = (target_ulong)(unsigned long)puc;
4933 /* jump PC: use T2 and the jump targets of the translation */
4934 if (t2)
4935 env->npc = gen_opc_jump_pc[0];
4936 else
4937 env->npc = gen_opc_jump_pc[1];
4938 } else {
4939 env->npc = npc;
4942 /* flush pending conditional evaluations before exposing cpu state */
4943 if (CC_OP != CC_OP_FLAGS) {
4944 helper_compute_psr();