Sparc64: replace tsptr with helper routine
[qemu/aliguori-queue.git] / target-sparc / translate.c
blob5ef543a1e9c5104fa4489375a475bba30b557be9
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "exec-all.h"
29 #include "disas.h"
30 #include "helper.h"
31 #include "tcg-op.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
36 #define DEBUG_DISAS
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env, cpu_regwptr;
44 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45 static TCGv_i32 cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
81 sparc_def_t *def;
82 } DisasContext;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of 'x' to a full int.
 * E.g. sign_extend(0x3, 2) == -1, sign_extend(0x1, 2) == 1.
 *
 * The previous implementation did (x << (32 - len)) >> (32 - len),
 * which left-shifts into the sign bit (undefined behavior for many
 * inputs) and relies on implementation-defined arithmetic right shift
 * of negative values.  This version computes the identical result
 * using only well-defined unsigned arithmetic.
 */
static int sign_extend(int x, int len)
{
    unsigned int ux, sign;

    if (len <= 0 || len >= 32) {
        /* len == 32 was a no-op shift in the old code: return x as-is. */
        return x;
    }
    ux = (unsigned int)x & ((1u << len) - 1u);
    sign = 1u << (len - 1);
    /* (ux ^ sign) - sign propagates bit (len-1) into the high bits. */
    return (int)((ux ^ sign) - sign);
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
200 if (AM_CHECK(dc))
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
207 if (reg == 0)
208 tcg_gen_movi_tl(tn, 0);
209 else if (reg < 8)
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211 else {
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
218 if (reg == 0)
219 return;
220 else if (reg < 8)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
222 else {
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
232 tb = s->tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
239 tcg_gen_exit_tb((long)tb + tb_num);
240 } else {
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb(0);
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
279 TCGv r_temp;
280 TCGv_i32 r_const;
281 int l1;
283 l1 = gen_new_label();
285 r_temp = tcg_temp_new();
286 tcg_gen_xor_tl(r_temp, src1, src2);
287 tcg_gen_not_tl(r_temp, r_temp);
288 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
289 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
290 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
291 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
292 r_const = tcg_const_i32(TT_TOVF);
293 gen_helper_raise_exception(r_const);
294 tcg_temp_free_i32(r_const);
295 gen_set_label(l1);
296 tcg_temp_free(r_temp);
299 static inline void gen_tag_tv(TCGv src1, TCGv src2)
301 int l1;
302 TCGv_i32 r_const;
304 l1 = gen_new_label();
305 tcg_gen_or_tl(cpu_tmp0, src1, src2);
306 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
307 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
308 r_const = tcg_const_i32(TT_TOVF);
309 gen_helper_raise_exception(r_const);
310 tcg_temp_free_i32(r_const);
311 gen_set_label(l1);
314 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
316 tcg_gen_mov_tl(cpu_cc_src, src1);
317 tcg_gen_movi_tl(cpu_cc_src2, src2);
318 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
319 tcg_gen_mov_tl(dst, cpu_cc_dst);
322 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
324 tcg_gen_mov_tl(cpu_cc_src, src1);
325 tcg_gen_mov_tl(cpu_cc_src2, src2);
326 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
327 tcg_gen_mov_tl(dst, cpu_cc_dst);
330 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
332 tcg_gen_mov_tl(cpu_cc_src, src1);
333 tcg_gen_movi_tl(cpu_cc_src2, src2);
334 gen_mov_reg_C(cpu_tmp0, cpu_psr);
335 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
336 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
337 tcg_gen_mov_tl(dst, cpu_cc_dst);
340 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
342 tcg_gen_mov_tl(cpu_cc_src, src1);
343 tcg_gen_mov_tl(cpu_cc_src2, src2);
344 gen_mov_reg_C(cpu_tmp0, cpu_psr);
345 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
346 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
347 tcg_gen_mov_tl(dst, cpu_cc_dst);
350 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
352 tcg_gen_mov_tl(cpu_cc_src, src1);
353 tcg_gen_mov_tl(cpu_cc_src2, src2);
354 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
355 tcg_gen_mov_tl(dst, cpu_cc_dst);
358 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
360 tcg_gen_mov_tl(cpu_cc_src, src1);
361 tcg_gen_mov_tl(cpu_cc_src2, src2);
362 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
363 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
364 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
365 tcg_gen_mov_tl(dst, cpu_cc_dst);
368 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
370 TCGv r_temp;
371 TCGv_i32 r_const;
372 int l1;
374 l1 = gen_new_label();
376 r_temp = tcg_temp_new();
377 tcg_gen_xor_tl(r_temp, src1, src2);
378 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
379 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
380 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
381 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
382 r_const = tcg_const_i32(TT_TOVF);
383 gen_helper_raise_exception(r_const);
384 tcg_temp_free_i32(r_const);
385 gen_set_label(l1);
386 tcg_temp_free(r_temp);
389 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
391 tcg_gen_mov_tl(cpu_cc_src, src1);
392 tcg_gen_movi_tl(cpu_cc_src2, src2);
393 if (src2 == 0) {
394 tcg_gen_mov_tl(cpu_cc_dst, src1);
395 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
396 dc->cc_op = CC_OP_LOGIC;
397 } else {
398 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
399 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
400 dc->cc_op = CC_OP_SUB;
402 tcg_gen_mov_tl(dst, cpu_cc_dst);
405 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
407 tcg_gen_mov_tl(cpu_cc_src, src1);
408 tcg_gen_mov_tl(cpu_cc_src2, src2);
409 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
410 tcg_gen_mov_tl(dst, cpu_cc_dst);
413 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
415 tcg_gen_mov_tl(cpu_cc_src, src1);
416 tcg_gen_movi_tl(cpu_cc_src2, src2);
417 gen_mov_reg_C(cpu_tmp0, cpu_psr);
418 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
419 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
420 tcg_gen_mov_tl(dst, cpu_cc_dst);
423 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
425 tcg_gen_mov_tl(cpu_cc_src, src1);
426 tcg_gen_mov_tl(cpu_cc_src2, src2);
427 gen_mov_reg_C(cpu_tmp0, cpu_psr);
428 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
429 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
430 tcg_gen_mov_tl(dst, cpu_cc_dst);
433 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
435 tcg_gen_mov_tl(cpu_cc_src, src1);
436 tcg_gen_mov_tl(cpu_cc_src2, src2);
437 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
438 tcg_gen_mov_tl(dst, cpu_cc_dst);
441 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
443 tcg_gen_mov_tl(cpu_cc_src, src1);
444 tcg_gen_mov_tl(cpu_cc_src2, src2);
445 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
446 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
447 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 tcg_gen_mov_tl(dst, cpu_cc_dst);
451 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
453 TCGv r_temp;
454 int l1;
456 l1 = gen_new_label();
457 r_temp = tcg_temp_new();
459 /* old op:
460 if (!(env->y & 1))
461 T1 = 0;
463 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
464 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
465 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
466 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
467 tcg_gen_movi_tl(cpu_cc_src2, 0);
468 gen_set_label(l1);
470 // b2 = T0 & 1;
471 // env->y = (b2 << 31) | (env->y >> 1);
472 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
473 tcg_gen_shli_tl(r_temp, r_temp, 31);
474 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
475 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
476 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
477 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
479 // b1 = N ^ V;
480 gen_mov_reg_N(cpu_tmp0, cpu_psr);
481 gen_mov_reg_V(r_temp, cpu_psr);
482 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
483 tcg_temp_free(r_temp);
485 // T0 = (b1 << 31) | (T0 >> 1);
486 // src1 = T0;
487 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
488 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
489 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
491 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
493 tcg_gen_mov_tl(dst, cpu_cc_dst);
496 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
498 TCGv_i64 r_temp, r_temp2;
500 r_temp = tcg_temp_new_i64();
501 r_temp2 = tcg_temp_new_i64();
503 tcg_gen_extu_tl_i64(r_temp, src2);
504 tcg_gen_extu_tl_i64(r_temp2, src1);
505 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
507 tcg_gen_shri_i64(r_temp, r_temp2, 32);
508 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
509 tcg_temp_free_i64(r_temp);
510 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
511 #ifdef TARGET_SPARC64
512 tcg_gen_mov_i64(dst, r_temp2);
513 #else
514 tcg_gen_trunc_i64_tl(dst, r_temp2);
515 #endif
516 tcg_temp_free_i64(r_temp2);
519 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
521 TCGv_i64 r_temp, r_temp2;
523 r_temp = tcg_temp_new_i64();
524 r_temp2 = tcg_temp_new_i64();
526 tcg_gen_ext_tl_i64(r_temp, src2);
527 tcg_gen_ext_tl_i64(r_temp2, src1);
528 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
530 tcg_gen_shri_i64(r_temp, r_temp2, 32);
531 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
532 tcg_temp_free_i64(r_temp);
533 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
534 #ifdef TARGET_SPARC64
535 tcg_gen_mov_i64(dst, r_temp2);
536 #else
537 tcg_gen_trunc_i64_tl(dst, r_temp2);
538 #endif
539 tcg_temp_free_i64(r_temp2);
542 #ifdef TARGET_SPARC64
543 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
545 TCGv_i32 r_const;
546 int l1;
548 l1 = gen_new_label();
549 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
550 r_const = tcg_const_i32(TT_DIV_ZERO);
551 gen_helper_raise_exception(r_const);
552 tcg_temp_free_i32(r_const);
553 gen_set_label(l1);
556 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
558 int l1, l2;
560 l1 = gen_new_label();
561 l2 = gen_new_label();
562 tcg_gen_mov_tl(cpu_cc_src, src1);
563 tcg_gen_mov_tl(cpu_cc_src2, src2);
564 gen_trap_ifdivzero_tl(cpu_cc_src2);
565 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
566 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
567 tcg_gen_movi_i64(dst, INT64_MIN);
568 tcg_gen_br(l2);
569 gen_set_label(l1);
570 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
571 gen_set_label(l2);
573 #endif
575 // 1
576 static inline void gen_op_eval_ba(TCGv dst)
578 tcg_gen_movi_tl(dst, 1);
581 // Z
582 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
584 gen_mov_reg_Z(dst, src);
587 // Z | (N ^ V)
588 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
590 gen_mov_reg_N(cpu_tmp0, src);
591 gen_mov_reg_V(dst, src);
592 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
593 gen_mov_reg_Z(cpu_tmp0, src);
594 tcg_gen_or_tl(dst, dst, cpu_tmp0);
597 // N ^ V
598 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
600 gen_mov_reg_V(cpu_tmp0, src);
601 gen_mov_reg_N(dst, src);
602 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
605 // C | Z
606 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
608 gen_mov_reg_Z(cpu_tmp0, src);
609 gen_mov_reg_C(dst, src);
610 tcg_gen_or_tl(dst, dst, cpu_tmp0);
613 // C
614 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
616 gen_mov_reg_C(dst, src);
619 // V
620 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
622 gen_mov_reg_V(dst, src);
625 // 0
626 static inline void gen_op_eval_bn(TCGv dst)
628 tcg_gen_movi_tl(dst, 0);
631 // N
632 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
634 gen_mov_reg_N(dst, src);
637 // !Z
638 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
640 gen_mov_reg_Z(dst, src);
641 tcg_gen_xori_tl(dst, dst, 0x1);
644 // !(Z | (N ^ V))
645 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
647 gen_mov_reg_N(cpu_tmp0, src);
648 gen_mov_reg_V(dst, src);
649 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
650 gen_mov_reg_Z(cpu_tmp0, src);
651 tcg_gen_or_tl(dst, dst, cpu_tmp0);
652 tcg_gen_xori_tl(dst, dst, 0x1);
655 // !(N ^ V)
656 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
658 gen_mov_reg_V(cpu_tmp0, src);
659 gen_mov_reg_N(dst, src);
660 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
661 tcg_gen_xori_tl(dst, dst, 0x1);
664 // !(C | Z)
665 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
667 gen_mov_reg_Z(cpu_tmp0, src);
668 gen_mov_reg_C(dst, src);
669 tcg_gen_or_tl(dst, dst, cpu_tmp0);
670 tcg_gen_xori_tl(dst, dst, 0x1);
673 // !C
674 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
676 gen_mov_reg_C(dst, src);
677 tcg_gen_xori_tl(dst, dst, 0x1);
680 // !N
681 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
683 gen_mov_reg_N(dst, src);
684 tcg_gen_xori_tl(dst, dst, 0x1);
687 // !V
688 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
690 gen_mov_reg_V(dst, src);
691 tcg_gen_xori_tl(dst, dst, 0x1);
/*
   FPSR bit field FCC1 | FCC0:
    0 =
    1 <
    2 >
    3 unordered
*/
701 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
702 unsigned int fcc_offset)
704 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
705 tcg_gen_andi_tl(reg, reg, 0x1);
708 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
709 unsigned int fcc_offset)
711 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
712 tcg_gen_andi_tl(reg, reg, 0x1);
715 // !0: FCC0 | FCC1
716 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
717 unsigned int fcc_offset)
719 gen_mov_reg_FCC0(dst, src, fcc_offset);
720 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
721 tcg_gen_or_tl(dst, dst, cpu_tmp0);
724 // 1 or 2: FCC0 ^ FCC1
725 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
726 unsigned int fcc_offset)
728 gen_mov_reg_FCC0(dst, src, fcc_offset);
729 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
730 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
733 // 1 or 3: FCC0
734 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
735 unsigned int fcc_offset)
737 gen_mov_reg_FCC0(dst, src, fcc_offset);
740 // 1: FCC0 & !FCC1
741 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
742 unsigned int fcc_offset)
744 gen_mov_reg_FCC0(dst, src, fcc_offset);
745 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
746 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
747 tcg_gen_and_tl(dst, dst, cpu_tmp0);
750 // 2 or 3: FCC1
751 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
752 unsigned int fcc_offset)
754 gen_mov_reg_FCC1(dst, src, fcc_offset);
757 // 2: !FCC0 & FCC1
758 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
759 unsigned int fcc_offset)
761 gen_mov_reg_FCC0(dst, src, fcc_offset);
762 tcg_gen_xori_tl(dst, dst, 0x1);
763 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
764 tcg_gen_and_tl(dst, dst, cpu_tmp0);
767 // 3: FCC0 & FCC1
768 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
769 unsigned int fcc_offset)
771 gen_mov_reg_FCC0(dst, src, fcc_offset);
772 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
773 tcg_gen_and_tl(dst, dst, cpu_tmp0);
776 // 0: !(FCC0 | FCC1)
777 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
778 unsigned int fcc_offset)
780 gen_mov_reg_FCC0(dst, src, fcc_offset);
781 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
782 tcg_gen_or_tl(dst, dst, cpu_tmp0);
783 tcg_gen_xori_tl(dst, dst, 0x1);
786 // 0 or 3: !(FCC0 ^ FCC1)
787 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
788 unsigned int fcc_offset)
790 gen_mov_reg_FCC0(dst, src, fcc_offset);
791 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
792 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
793 tcg_gen_xori_tl(dst, dst, 0x1);
796 // 0 or 2: !FCC0
797 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
798 unsigned int fcc_offset)
800 gen_mov_reg_FCC0(dst, src, fcc_offset);
801 tcg_gen_xori_tl(dst, dst, 0x1);
804 // !1: !(FCC0 & !FCC1)
805 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
806 unsigned int fcc_offset)
808 gen_mov_reg_FCC0(dst, src, fcc_offset);
809 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
810 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
811 tcg_gen_and_tl(dst, dst, cpu_tmp0);
812 tcg_gen_xori_tl(dst, dst, 0x1);
815 // 0 or 1: !FCC1
816 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
817 unsigned int fcc_offset)
819 gen_mov_reg_FCC1(dst, src, fcc_offset);
820 tcg_gen_xori_tl(dst, dst, 0x1);
823 // !2: !(!FCC0 & FCC1)
824 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
825 unsigned int fcc_offset)
827 gen_mov_reg_FCC0(dst, src, fcc_offset);
828 tcg_gen_xori_tl(dst, dst, 0x1);
829 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
830 tcg_gen_and_tl(dst, dst, cpu_tmp0);
831 tcg_gen_xori_tl(dst, dst, 0x1);
834 // !3: !(FCC0 & FCC1)
835 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
836 unsigned int fcc_offset)
838 gen_mov_reg_FCC0(dst, src, fcc_offset);
839 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
840 tcg_gen_and_tl(dst, dst, cpu_tmp0);
841 tcg_gen_xori_tl(dst, dst, 0x1);
844 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
845 target_ulong pc2, TCGv r_cond)
847 int l1;
849 l1 = gen_new_label();
851 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
853 gen_goto_tb(dc, 0, pc1, pc1 + 4);
855 gen_set_label(l1);
856 gen_goto_tb(dc, 1, pc2, pc2 + 4);
859 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
860 target_ulong pc2, TCGv r_cond)
862 int l1;
864 l1 = gen_new_label();
866 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
868 gen_goto_tb(dc, 0, pc2, pc1);
870 gen_set_label(l1);
871 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
874 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
875 TCGv r_cond)
877 int l1, l2;
879 l1 = gen_new_label();
880 l2 = gen_new_label();
882 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
884 tcg_gen_movi_tl(cpu_npc, npc1);
885 tcg_gen_br(l2);
887 gen_set_label(l1);
888 tcg_gen_movi_tl(cpu_npc, npc2);
889 gen_set_label(l2);
892 /* call this function before using the condition register as it may
893 have been set for a jump */
894 static inline void flush_cond(DisasContext *dc, TCGv cond)
896 if (dc->npc == JUMP_PC) {
897 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
898 dc->npc = DYNAMIC_PC;
902 static inline void save_npc(DisasContext *dc, TCGv cond)
904 if (dc->npc == JUMP_PC) {
905 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
906 dc->npc = DYNAMIC_PC;
907 } else if (dc->npc != DYNAMIC_PC) {
908 tcg_gen_movi_tl(cpu_npc, dc->npc);
912 static inline void save_state(DisasContext *dc, TCGv cond)
914 tcg_gen_movi_tl(cpu_pc, dc->pc);
915 save_npc(dc, cond);
918 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
920 if (dc->npc == JUMP_PC) {
921 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
922 tcg_gen_mov_tl(cpu_pc, cpu_npc);
923 dc->pc = DYNAMIC_PC;
924 } else if (dc->npc == DYNAMIC_PC) {
925 tcg_gen_mov_tl(cpu_pc, cpu_npc);
926 dc->pc = DYNAMIC_PC;
927 } else {
928 dc->pc = dc->npc;
932 static inline void gen_op_next_insn(void)
934 tcg_gen_mov_tl(cpu_pc, cpu_npc);
935 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
938 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
939 DisasContext *dc)
941 TCGv_i32 r_src;
943 #ifdef TARGET_SPARC64
944 if (cc)
945 r_src = cpu_xcc;
946 else
947 r_src = cpu_psr;
948 #else
949 r_src = cpu_psr;
950 #endif
951 switch (dc->cc_op) {
952 case CC_OP_FLAGS:
953 break;
954 default:
955 gen_helper_compute_psr();
956 dc->cc_op = CC_OP_FLAGS;
957 break;
959 switch (cond) {
960 case 0x0:
961 gen_op_eval_bn(r_dst);
962 break;
963 case 0x1:
964 gen_op_eval_be(r_dst, r_src);
965 break;
966 case 0x2:
967 gen_op_eval_ble(r_dst, r_src);
968 break;
969 case 0x3:
970 gen_op_eval_bl(r_dst, r_src);
971 break;
972 case 0x4:
973 gen_op_eval_bleu(r_dst, r_src);
974 break;
975 case 0x5:
976 gen_op_eval_bcs(r_dst, r_src);
977 break;
978 case 0x6:
979 gen_op_eval_bneg(r_dst, r_src);
980 break;
981 case 0x7:
982 gen_op_eval_bvs(r_dst, r_src);
983 break;
984 case 0x8:
985 gen_op_eval_ba(r_dst);
986 break;
987 case 0x9:
988 gen_op_eval_bne(r_dst, r_src);
989 break;
990 case 0xa:
991 gen_op_eval_bg(r_dst, r_src);
992 break;
993 case 0xb:
994 gen_op_eval_bge(r_dst, r_src);
995 break;
996 case 0xc:
997 gen_op_eval_bgu(r_dst, r_src);
998 break;
999 case 0xd:
1000 gen_op_eval_bcc(r_dst, r_src);
1001 break;
1002 case 0xe:
1003 gen_op_eval_bpos(r_dst, r_src);
1004 break;
1005 case 0xf:
1006 gen_op_eval_bvc(r_dst, r_src);
1007 break;
1011 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1013 unsigned int offset;
1015 switch (cc) {
1016 default:
1017 case 0x0:
1018 offset = 0;
1019 break;
1020 case 0x1:
1021 offset = 32 - 10;
1022 break;
1023 case 0x2:
1024 offset = 34 - 10;
1025 break;
1026 case 0x3:
1027 offset = 36 - 10;
1028 break;
1031 switch (cond) {
1032 case 0x0:
1033 gen_op_eval_bn(r_dst);
1034 break;
1035 case 0x1:
1036 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1037 break;
1038 case 0x2:
1039 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1040 break;
1041 case 0x3:
1042 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1043 break;
1044 case 0x4:
1045 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1046 break;
1047 case 0x5:
1048 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1049 break;
1050 case 0x6:
1051 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1052 break;
1053 case 0x7:
1054 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1055 break;
1056 case 0x8:
1057 gen_op_eval_ba(r_dst);
1058 break;
1059 case 0x9:
1060 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1061 break;
1062 case 0xa:
1063 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1064 break;
1065 case 0xb:
1066 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1067 break;
1068 case 0xc:
1069 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1070 break;
1071 case 0xd:
1072 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1073 break;
1074 case 0xe:
1075 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1076 break;
1077 case 0xf:
1078 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1079 break;
1083 #ifdef TARGET_SPARC64
1084 // Inverted logic
1085 static const int gen_tcg_cond_reg[8] = {
1087 TCG_COND_NE,
1088 TCG_COND_GT,
1089 TCG_COND_GE,
1091 TCG_COND_EQ,
1092 TCG_COND_LE,
1093 TCG_COND_LT,
1096 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1098 int l1;
1100 l1 = gen_new_label();
1101 tcg_gen_movi_tl(r_dst, 0);
1102 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1103 tcg_gen_movi_tl(r_dst, 1);
1104 gen_set_label(l1);
1106 #endif
1108 /* XXX: potentially incorrect if dynamic npc */
1109 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1110 TCGv r_cond)
1112 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1113 target_ulong target = dc->pc + offset;
1115 if (cond == 0x0) {
1116 /* unconditional not taken */
1117 if (a) {
1118 dc->pc = dc->npc + 4;
1119 dc->npc = dc->pc + 4;
1120 } else {
1121 dc->pc = dc->npc;
1122 dc->npc = dc->pc + 4;
1124 } else if (cond == 0x8) {
1125 /* unconditional taken */
1126 if (a) {
1127 dc->pc = target;
1128 dc->npc = dc->pc + 4;
1129 } else {
1130 dc->pc = dc->npc;
1131 dc->npc = target;
1133 } else {
1134 flush_cond(dc, r_cond);
1135 gen_cond(r_cond, cc, cond, dc);
1136 if (a) {
1137 gen_branch_a(dc, target, dc->npc, r_cond);
1138 dc->is_br = 1;
1139 } else {
1140 dc->pc = dc->npc;
1141 dc->jump_pc[0] = target;
1142 dc->jump_pc[1] = dc->npc + 4;
1143 dc->npc = JUMP_PC;
1148 /* XXX: potentially incorrect if dynamic npc */
1149 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1150 TCGv r_cond)
1152 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1153 target_ulong target = dc->pc + offset;
1155 if (cond == 0x0) {
1156 /* unconditional not taken */
1157 if (a) {
1158 dc->pc = dc->npc + 4;
1159 dc->npc = dc->pc + 4;
1160 } else {
1161 dc->pc = dc->npc;
1162 dc->npc = dc->pc + 4;
1164 } else if (cond == 0x8) {
1165 /* unconditional taken */
1166 if (a) {
1167 dc->pc = target;
1168 dc->npc = dc->pc + 4;
1169 } else {
1170 dc->pc = dc->npc;
1171 dc->npc = target;
1173 } else {
1174 flush_cond(dc, r_cond);
1175 gen_fcond(r_cond, cc, cond);
1176 if (a) {
1177 gen_branch_a(dc, target, dc->npc, r_cond);
1178 dc->is_br = 1;
1179 } else {
1180 dc->pc = dc->npc;
1181 dc->jump_pc[0] = target;
1182 dc->jump_pc[1] = dc->npc + 4;
1183 dc->npc = JUMP_PC;
1188 #ifdef TARGET_SPARC64
1189 /* XXX: potentially incorrect if dynamic npc */
1190 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1191 TCGv r_cond, TCGv r_reg)
1193 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1194 target_ulong target = dc->pc + offset;
1196 flush_cond(dc, r_cond);
1197 gen_cond_reg(r_cond, cond, r_reg);
1198 if (a) {
1199 gen_branch_a(dc, target, dc->npc, r_cond);
1200 dc->is_br = 1;
1201 } else {
1202 dc->pc = dc->npc;
1203 dc->jump_pc[0] = target;
1204 dc->jump_pc[1] = dc->npc + 4;
1205 dc->npc = JUMP_PC;
1209 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1211 switch (fccno) {
1212 case 0:
1213 gen_helper_fcmps(r_rs1, r_rs2);
1214 break;
1215 case 1:
1216 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1217 break;
1218 case 2:
1219 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1220 break;
1221 case 3:
1222 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1223 break;
1227 static inline void gen_op_fcmpd(int fccno)
1229 switch (fccno) {
1230 case 0:
1231 gen_helper_fcmpd();
1232 break;
1233 case 1:
1234 gen_helper_fcmpd_fcc1();
1235 break;
1236 case 2:
1237 gen_helper_fcmpd_fcc2();
1238 break;
1239 case 3:
1240 gen_helper_fcmpd_fcc3();
1241 break;
/* Emit a quad-precision FP compare targeting %fccN; the helper takes
   its operands implicitly (no TCG arguments). */
static inline void gen_op_fcmpq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpq();
    } else if (fccno == 1) {
        gen_helper_fcmpq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpq_fcc3();
    }
}
1263 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1265 switch (fccno) {
1266 case 0:
1267 gen_helper_fcmpes(r_rs1, r_rs2);
1268 break;
1269 case 1:
1270 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1271 break;
1272 case 2:
1273 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1274 break;
1275 case 3:
1276 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1277 break;
/* Emit a double-precision FP compare-with-exception (fcmped) targeting
   %fccN; the helper takes its operands implicitly. */
static inline void gen_op_fcmped(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmped();
    } else if (fccno == 1) {
        gen_helper_fcmped_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmped_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmped_fcc3();
    }
}
/* Emit a quad-precision FP compare-with-exception (fcmpeq) targeting
   %fccN; the helper takes its operands implicitly. */
static inline void gen_op_fcmpeq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpeq();
    } else if (fccno == 1) {
        gen_helper_fcmpeq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpeq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpeq_fcc3();
    }
}
1317 #else
/* Pre-V9 build: single FP condition-code set, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}
/* Pre-V9 build: single FP condition-code set, so fccno is ignored. */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* Pre-V9 build: single FP condition-code set, so fccno is ignored. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
/* Pre-V9 build: single FP condition-code set, so fccno is ignored. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}
/* Pre-V9 build: single FP condition-code set, so fccno is ignored. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* Pre-V9 build: single FP condition-code set, so fccno is ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1348 #endif
/* Raise an FP exception trap with the given FSR.ftt value: clear the
   old ftt field, OR in fsr_flags, then raise TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
/* If the FPU is disabled, emit code raising an fp-disabled trap and end
   the TB; returns 1 so the caller stops translating this insn, else 0.
   User-only builds never trap (the #if compiles the check away). */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* make pc/npc/condition state visible before trapping */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
/* Clear FSR.ftt and the current IEEE exception (cexc) bits. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
/* Emit a helper call clearing the pending FP exception state before an
   FP operation whose exceptions will be checked afterwards. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1388 /* asi moves */
1389 #ifdef TARGET_SPARC64
1390 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1392 int asi;
1393 TCGv_i32 r_asi;
1395 if (IS_IMM) {
1396 r_asi = tcg_temp_new_i32();
1397 tcg_gen_mov_i32(r_asi, cpu_asi);
1398 } else {
1399 asi = GET_FIELD(insn, 19, 26);
1400 r_asi = tcg_const_i32(asi);
1402 return r_asi;
1405 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1406 int sign)
1408 TCGv_i32 r_asi, r_size, r_sign;
1410 r_asi = gen_get_asi(insn, addr);
1411 r_size = tcg_const_i32(size);
1412 r_sign = tcg_const_i32(sign);
1413 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1414 tcg_temp_free_i32(r_sign);
1415 tcg_temp_free_i32(r_size);
1416 tcg_temp_free_i32(r_asi);
1419 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1421 TCGv_i32 r_asi, r_size;
1423 r_asi = gen_get_asi(insn, addr);
1424 r_size = tcg_const_i32(size);
1425 gen_helper_st_asi(addr, src, r_asi, r_size);
1426 tcg_temp_free_i32(r_size);
1427 tcg_temp_free_i32(r_asi);
1430 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1432 TCGv_i32 r_asi, r_size, r_rd;
1434 r_asi = gen_get_asi(insn, addr);
1435 r_size = tcg_const_i32(size);
1436 r_rd = tcg_const_i32(rd);
1437 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1438 tcg_temp_free_i32(r_rd);
1439 tcg_temp_free_i32(r_size);
1440 tcg_temp_free_i32(r_asi);
1443 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1445 TCGv_i32 r_asi, r_size, r_rd;
1447 r_asi = gen_get_asi(insn, addr);
1448 r_size = tcg_const_i32(size);
1449 r_rd = tcg_const_i32(rd);
1450 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1451 tcg_temp_free_i32(r_rd);
1452 tcg_temp_free_i32(r_size);
1453 tcg_temp_free_i32(r_asi);
/* SWAP via ASI: load the old 32-bit word at addr into cpu_tmp64, store
   the current contents of dst to addr, then return the old word in dst.
   r_asi/r_size are shared by the load and the store. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);  /* zero-extend the loaded word */
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1471 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1473 TCGv_i32 r_asi, r_rd;
1475 r_asi = gen_get_asi(insn, addr);
1476 r_rd = tcg_const_i32(rd);
1477 gen_helper_ldda_asi(addr, r_asi, r_rd);
1478 tcg_temp_free_i32(r_rd);
1479 tcg_temp_free_i32(r_asi);
/* stda via ASI: store the register pair (rd, rd+1) as one 64-bit value.
   'hi' already holds register rd; rd+1 is fetched into cpu_tmp0. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    /* cpu_tmp64 = hi:low-word-of-(rd+1) */
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1495 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1496 int rd)
1498 TCGv r_val1;
1499 TCGv_i32 r_asi;
1501 r_val1 = tcg_temp_new();
1502 gen_movl_reg_TN(rd, r_val1);
1503 r_asi = gen_get_asi(insn, addr);
1504 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1505 tcg_temp_free_i32(r_asi);
1506 tcg_temp_free(r_val1);
/* casxa: 64-bit compare-and-swap via ASI.  The compare value (register
   rd) is staged in cpu_tmp64; the swap value is val2, result in dst. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1520 #elif !defined(CONFIG_USER_ONLY)
1522 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1523 int sign)
1525 TCGv_i32 r_asi, r_size, r_sign;
1527 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1528 r_size = tcg_const_i32(size);
1529 r_sign = tcg_const_i32(sign);
1530 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1531 tcg_temp_free(r_sign);
1532 tcg_temp_free(r_size);
1533 tcg_temp_free(r_asi);
1534 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1537 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1539 TCGv_i32 r_asi, r_size;
1541 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1542 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1543 r_size = tcg_const_i32(size);
1544 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1545 tcg_temp_free(r_size);
1546 tcg_temp_free(r_asi);
1549 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1551 TCGv_i32 r_asi, r_size, r_sign;
1552 TCGv_i64 r_val;
1554 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1555 r_size = tcg_const_i32(4);
1556 r_sign = tcg_const_i32(0);
1557 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1558 tcg_temp_free(r_sign);
1559 r_val = tcg_temp_new_i64();
1560 tcg_gen_extu_tl_i64(r_val, dst);
1561 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1562 tcg_temp_free_i64(r_val);
1563 tcg_temp_free(r_size);
1564 tcg_temp_free(r_asi);
1565 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1568 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1570 TCGv_i32 r_asi, r_size, r_sign;
1572 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1573 r_size = tcg_const_i32(8);
1574 r_sign = tcg_const_i32(0);
1575 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1576 tcg_temp_free(r_sign);
1577 tcg_temp_free(r_size);
1578 tcg_temp_free(r_asi);
1579 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1580 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1581 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1582 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1583 gen_movl_TN_reg(rd, hi);
1586 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1588 TCGv_i32 r_asi, r_size;
1590 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1591 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1592 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1593 r_size = tcg_const_i32(8);
1594 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1595 tcg_temp_free(r_size);
1596 tcg_temp_free(r_asi);
1598 #endif
1600 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUB via ASI: load one unsigned byte from addr into dst, then store
   0xff back to the same location.
   NOTE(review): unlike gen_get_asi(), the immediate %asi form is not
   handled here — the ASI is always taken from the insn field; verify
   against the V9 ldstuba definition. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
1616 #endif
/* Return a TCGv holding source register rs1 of insn: %g0 reads as a
   fresh constant zero, %g1-%g7 alias the global TCG registers, and
   window registers are loaded from the register window into 'def'.
   NOTE: the tcg_const_tl(0) temporary is never freed (known XXX). */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
/* Return a TCGv holding the second operand of insn: the sign-extended
   13-bit immediate when the i bit is set, otherwise register rs2
   (%g0 reads as a fresh constant zero, %g1-%g7 alias the global TCG
   registers, window registers are loaded into 'def').
   NOTE: the tcg_const_tl() temporaries are never freed (known XXX). */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
1656 #ifdef TARGET_SPARC64
/* Compute a pointer to the trap state for the current trap level:
   r_tsptr = &env->ts[env->tl & MAXTL_MASK].
   The cpu_env parameter deliberately shadows the file-scope global of
   the same name. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv r_tl = tcg_temp_new();

    /* load env->tl into r_tl */
    {
        TCGv_i32 r_tl_tmp = tcg_temp_new_i32();
        tcg_gen_ld_i32(r_tl_tmp, cpu_env, offsetof(CPUSPARCState, tl));
        tcg_gen_ext_i32_tl(r_tl, r_tl_tmp);
        tcg_temp_free_i32(r_tl_tmp);
    }

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_tl(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_tl(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl);

    tcg_temp_free(r_tl);
}
1681 #endif
/* Abort decoding with an illegal-instruction trap if the CPU model
   lacks the given integer-unit feature; relies on the illegal_insn
   label inside disas_sparc_insn(). */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Same check for FPU features; jumps to the nfpu_insn label instead. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1690 /* before an instruction, dc->pc must be static */
1691 static void disas_sparc_insn(DisasContext * dc)
1693 unsigned int insn, opc, rs1, rs2, rd;
1694 target_long simm;
1696 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1697 tcg_gen_debug_insn_start(dc->pc);
1698 insn = ldl_code(dc->pc);
1699 opc = GET_FIELD(insn, 0, 1);
1701 rd = GET_FIELD(insn, 2, 6);
1703 cpu_src1 = tcg_temp_new(); // const
1704 cpu_src2 = tcg_temp_new(); // const
1706 switch (opc) {
1707 case 0: /* branches/sethi */
1709 unsigned int xop = GET_FIELD(insn, 7, 9);
1710 int32_t target;
1711 switch (xop) {
1712 #ifdef TARGET_SPARC64
1713 case 0x1: /* V9 BPcc */
1715 int cc;
1717 target = GET_FIELD_SP(insn, 0, 18);
1718 target = sign_extend(target, 18);
1719 target <<= 2;
1720 cc = GET_FIELD_SP(insn, 20, 21);
1721 if (cc == 0)
1722 do_branch(dc, target, insn, 0, cpu_cond);
1723 else if (cc == 2)
1724 do_branch(dc, target, insn, 1, cpu_cond);
1725 else
1726 goto illegal_insn;
1727 goto jmp_insn;
1729 case 0x3: /* V9 BPr */
1731 target = GET_FIELD_SP(insn, 0, 13) |
1732 (GET_FIELD_SP(insn, 20, 21) << 14);
1733 target = sign_extend(target, 16);
1734 target <<= 2;
1735 cpu_src1 = get_src1(insn, cpu_src1);
1736 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1737 goto jmp_insn;
1739 case 0x5: /* V9 FBPcc */
1741 int cc = GET_FIELD_SP(insn, 20, 21);
1742 if (gen_trap_ifnofpu(dc, cpu_cond))
1743 goto jmp_insn;
1744 target = GET_FIELD_SP(insn, 0, 18);
1745 target = sign_extend(target, 19);
1746 target <<= 2;
1747 do_fbranch(dc, target, insn, cc, cpu_cond);
1748 goto jmp_insn;
1750 #else
1751 case 0x7: /* CBN+x */
1753 goto ncp_insn;
1755 #endif
1756 case 0x2: /* BN+x */
1758 target = GET_FIELD(insn, 10, 31);
1759 target = sign_extend(target, 22);
1760 target <<= 2;
1761 do_branch(dc, target, insn, 0, cpu_cond);
1762 goto jmp_insn;
1764 case 0x6: /* FBN+x */
1766 if (gen_trap_ifnofpu(dc, cpu_cond))
1767 goto jmp_insn;
1768 target = GET_FIELD(insn, 10, 31);
1769 target = sign_extend(target, 22);
1770 target <<= 2;
1771 do_fbranch(dc, target, insn, 0, cpu_cond);
1772 goto jmp_insn;
1774 case 0x4: /* SETHI */
1775 if (rd) { // nop
1776 uint32_t value = GET_FIELD(insn, 10, 31);
1777 TCGv r_const;
1779 r_const = tcg_const_tl(value << 10);
1780 gen_movl_TN_reg(rd, r_const);
1781 tcg_temp_free(r_const);
1783 break;
1784 case 0x0: /* UNIMPL */
1785 default:
1786 goto illegal_insn;
1788 break;
1790 break;
1791 case 1: /*CALL*/
1793 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1794 TCGv r_const;
1796 r_const = tcg_const_tl(dc->pc);
1797 gen_movl_TN_reg(15, r_const);
1798 tcg_temp_free(r_const);
1799 target += dc->pc;
1800 gen_mov_pc_npc(dc, cpu_cond);
1801 dc->npc = target;
1803 goto jmp_insn;
1804 case 2: /* FPU & Logical Operations */
1806 unsigned int xop = GET_FIELD(insn, 7, 12);
1807 if (xop == 0x3a) { /* generate trap */
1808 int cond;
1810 cpu_src1 = get_src1(insn, cpu_src1);
1811 if (IS_IMM) {
1812 rs2 = GET_FIELD(insn, 25, 31);
1813 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1814 } else {
1815 rs2 = GET_FIELD(insn, 27, 31);
1816 if (rs2 != 0) {
1817 gen_movl_reg_TN(rs2, cpu_src2);
1818 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1819 } else
1820 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1822 cond = GET_FIELD(insn, 3, 6);
1823 if (cond == 0x8) {
1824 save_state(dc, cpu_cond);
1825 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1826 supervisor(dc))
1827 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1828 else
1829 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1830 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1831 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1832 gen_helper_raise_exception(cpu_tmp32);
1833 } else if (cond != 0) {
1834 TCGv r_cond = tcg_temp_new();
1835 int l1;
1836 #ifdef TARGET_SPARC64
1837 /* V9 icc/xcc */
1838 int cc = GET_FIELD_SP(insn, 11, 12);
1840 save_state(dc, cpu_cond);
1841 if (cc == 0)
1842 gen_cond(r_cond, 0, cond, dc);
1843 else if (cc == 2)
1844 gen_cond(r_cond, 1, cond, dc);
1845 else
1846 goto illegal_insn;
1847 #else
1848 save_state(dc, cpu_cond);
1849 gen_cond(r_cond, 0, cond, dc);
1850 #endif
1851 l1 = gen_new_label();
1852 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1854 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1855 supervisor(dc))
1856 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1857 else
1858 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1859 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1860 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1861 gen_helper_raise_exception(cpu_tmp32);
1863 gen_set_label(l1);
1864 tcg_temp_free(r_cond);
1866 gen_op_next_insn();
1867 tcg_gen_exit_tb(0);
1868 dc->is_br = 1;
1869 goto jmp_insn;
1870 } else if (xop == 0x28) {
1871 rs1 = GET_FIELD(insn, 13, 17);
1872 switch(rs1) {
1873 case 0: /* rdy */
1874 #ifndef TARGET_SPARC64
1875 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1876 manual, rdy on the microSPARC
1877 II */
1878 case 0x0f: /* stbar in the SPARCv8 manual,
1879 rdy on the microSPARC II */
1880 case 0x10 ... 0x1f: /* implementation-dependent in the
1881 SPARCv8 manual, rdy on the
1882 microSPARC II */
1883 #endif
1884 gen_movl_TN_reg(rd, cpu_y);
1885 break;
1886 #ifdef TARGET_SPARC64
1887 case 0x2: /* V9 rdccr */
1888 gen_helper_compute_psr();
1889 gen_helper_rdccr(cpu_dst);
1890 gen_movl_TN_reg(rd, cpu_dst);
1891 break;
1892 case 0x3: /* V9 rdasi */
1893 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1894 gen_movl_TN_reg(rd, cpu_dst);
1895 break;
1896 case 0x4: /* V9 rdtick */
1898 TCGv_ptr r_tickptr;
1900 r_tickptr = tcg_temp_new_ptr();
1901 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1902 offsetof(CPUState, tick));
1903 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1904 tcg_temp_free_ptr(r_tickptr);
1905 gen_movl_TN_reg(rd, cpu_dst);
1907 break;
1908 case 0x5: /* V9 rdpc */
1910 TCGv r_const;
1912 r_const = tcg_const_tl(dc->pc);
1913 gen_movl_TN_reg(rd, r_const);
1914 tcg_temp_free(r_const);
1916 break;
1917 case 0x6: /* V9 rdfprs */
1918 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1919 gen_movl_TN_reg(rd, cpu_dst);
1920 break;
1921 case 0xf: /* V9 membar */
1922 break; /* no effect */
1923 case 0x13: /* Graphics Status */
1924 if (gen_trap_ifnofpu(dc, cpu_cond))
1925 goto jmp_insn;
1926 gen_movl_TN_reg(rd, cpu_gsr);
1927 break;
1928 case 0x16: /* Softint */
1929 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1930 gen_movl_TN_reg(rd, cpu_dst);
1931 break;
1932 case 0x17: /* Tick compare */
1933 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1934 break;
1935 case 0x18: /* System tick */
1937 TCGv_ptr r_tickptr;
1939 r_tickptr = tcg_temp_new_ptr();
1940 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1941 offsetof(CPUState, stick));
1942 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1943 tcg_temp_free_ptr(r_tickptr);
1944 gen_movl_TN_reg(rd, cpu_dst);
1946 break;
1947 case 0x19: /* System tick compare */
1948 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1949 break;
1950 case 0x10: /* Performance Control */
1951 case 0x11: /* Performance Instrumentation Counter */
1952 case 0x12: /* Dispatch Control */
1953 case 0x14: /* Softint set, WO */
1954 case 0x15: /* Softint clear, WO */
1955 #endif
1956 default:
1957 goto illegal_insn;
1959 #if !defined(CONFIG_USER_ONLY)
1960 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1961 #ifndef TARGET_SPARC64
1962 if (!supervisor(dc))
1963 goto priv_insn;
1964 gen_helper_compute_psr();
1965 dc->cc_op = CC_OP_FLAGS;
1966 gen_helper_rdpsr(cpu_dst);
1967 #else
1968 CHECK_IU_FEATURE(dc, HYPV);
1969 if (!hypervisor(dc))
1970 goto priv_insn;
1971 rs1 = GET_FIELD(insn, 13, 17);
1972 switch (rs1) {
1973 case 0: // hpstate
1974 // gen_op_rdhpstate();
1975 break;
1976 case 1: // htstate
1977 // gen_op_rdhtstate();
1978 break;
1979 case 3: // hintp
1980 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1981 break;
1982 case 5: // htba
1983 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1984 break;
1985 case 6: // hver
1986 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1987 break;
1988 case 31: // hstick_cmpr
1989 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
1990 break;
1991 default:
1992 goto illegal_insn;
1994 #endif
1995 gen_movl_TN_reg(rd, cpu_dst);
1996 break;
1997 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
1998 if (!supervisor(dc))
1999 goto priv_insn;
2000 #ifdef TARGET_SPARC64
2001 rs1 = GET_FIELD(insn, 13, 17);
2002 switch (rs1) {
2003 case 0: // tpc
2005 TCGv_ptr r_tsptr;
2007 r_tsptr = tcg_temp_new_ptr();
2008 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2009 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2010 offsetof(trap_state, tpc));
2011 tcg_temp_free_ptr(r_tsptr);
2013 break;
2014 case 1: // tnpc
2016 TCGv_ptr r_tsptr;
2018 r_tsptr = tcg_temp_new_ptr();
2019 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2020 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2021 offsetof(trap_state, tnpc));
2022 tcg_temp_free_ptr(r_tsptr);
2024 break;
2025 case 2: // tstate
2027 TCGv_ptr r_tsptr;
2029 r_tsptr = tcg_temp_new_ptr();
2030 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2031 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2032 offsetof(trap_state, tstate));
2033 tcg_temp_free_ptr(r_tsptr);
2035 break;
2036 case 3: // tt
2038 TCGv_ptr r_tsptr;
2040 r_tsptr = tcg_temp_new_ptr();
2041 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2042 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2043 offsetof(trap_state, tt));
2044 tcg_temp_free_ptr(r_tsptr);
2045 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2047 break;
2048 case 4: // tick
2050 TCGv_ptr r_tickptr;
2052 r_tickptr = tcg_temp_new_ptr();
2053 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2054 offsetof(CPUState, tick));
2055 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2056 gen_movl_TN_reg(rd, cpu_tmp0);
2057 tcg_temp_free_ptr(r_tickptr);
2059 break;
2060 case 5: // tba
2061 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2062 break;
2063 case 6: // pstate
2064 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2065 offsetof(CPUSPARCState, pstate));
2066 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2067 break;
2068 case 7: // tl
2069 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2070 offsetof(CPUSPARCState, tl));
2071 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2072 break;
2073 case 8: // pil
2074 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2075 offsetof(CPUSPARCState, psrpil));
2076 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2077 break;
2078 case 9: // cwp
2079 gen_helper_rdcwp(cpu_tmp0);
2080 break;
2081 case 10: // cansave
2082 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2083 offsetof(CPUSPARCState, cansave));
2084 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2085 break;
2086 case 11: // canrestore
2087 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2088 offsetof(CPUSPARCState, canrestore));
2089 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2090 break;
2091 case 12: // cleanwin
2092 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2093 offsetof(CPUSPARCState, cleanwin));
2094 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2095 break;
2096 case 13: // otherwin
2097 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2098 offsetof(CPUSPARCState, otherwin));
2099 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2100 break;
2101 case 14: // wstate
2102 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2103 offsetof(CPUSPARCState, wstate));
2104 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2105 break;
2106 case 16: // UA2005 gl
2107 CHECK_IU_FEATURE(dc, GL);
2108 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2109 offsetof(CPUSPARCState, gl));
2110 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2111 break;
2112 case 26: // UA2005 strand status
2113 CHECK_IU_FEATURE(dc, HYPV);
2114 if (!hypervisor(dc))
2115 goto priv_insn;
2116 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2117 break;
2118 case 31: // ver
2119 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2120 break;
2121 case 15: // fq
2122 default:
2123 goto illegal_insn;
2125 #else
2126 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2127 #endif
2128 gen_movl_TN_reg(rd, cpu_tmp0);
2129 break;
2130 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2131 #ifdef TARGET_SPARC64
2132 save_state(dc, cpu_cond);
2133 gen_helper_flushw();
2134 #else
2135 if (!supervisor(dc))
2136 goto priv_insn;
2137 gen_movl_TN_reg(rd, cpu_tbr);
2138 #endif
2139 break;
2140 #endif
2141 } else if (xop == 0x34) { /* FPU Operations */
2142 if (gen_trap_ifnofpu(dc, cpu_cond))
2143 goto jmp_insn;
2144 gen_op_clear_ieee_excp_and_FTT();
2145 rs1 = GET_FIELD(insn, 13, 17);
2146 rs2 = GET_FIELD(insn, 27, 31);
2147 xop = GET_FIELD(insn, 18, 26);
2148 switch (xop) {
2149 case 0x1: /* fmovs */
2150 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2151 break;
2152 case 0x5: /* fnegs */
2153 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2154 break;
2155 case 0x9: /* fabss */
2156 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2157 break;
2158 case 0x29: /* fsqrts */
2159 CHECK_FPU_FEATURE(dc, FSQRT);
2160 gen_clear_float_exceptions();
2161 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2162 gen_helper_check_ieee_exceptions();
2163 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2164 break;
2165 case 0x2a: /* fsqrtd */
2166 CHECK_FPU_FEATURE(dc, FSQRT);
2167 gen_op_load_fpr_DT1(DFPREG(rs2));
2168 gen_clear_float_exceptions();
2169 gen_helper_fsqrtd();
2170 gen_helper_check_ieee_exceptions();
2171 gen_op_store_DT0_fpr(DFPREG(rd));
2172 break;
2173 case 0x2b: /* fsqrtq */
2174 CHECK_FPU_FEATURE(dc, FLOAT128);
2175 gen_op_load_fpr_QT1(QFPREG(rs2));
2176 gen_clear_float_exceptions();
2177 gen_helper_fsqrtq();
2178 gen_helper_check_ieee_exceptions();
2179 gen_op_store_QT0_fpr(QFPREG(rd));
2180 break;
2181 case 0x41: /* fadds */
2182 gen_clear_float_exceptions();
2183 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2184 gen_helper_check_ieee_exceptions();
2185 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2186 break;
2187 case 0x42: /* faddd */
2188 gen_op_load_fpr_DT0(DFPREG(rs1));
2189 gen_op_load_fpr_DT1(DFPREG(rs2));
2190 gen_clear_float_exceptions();
2191 gen_helper_faddd();
2192 gen_helper_check_ieee_exceptions();
2193 gen_op_store_DT0_fpr(DFPREG(rd));
2194 break;
2195 case 0x43: /* faddq */
2196 CHECK_FPU_FEATURE(dc, FLOAT128);
2197 gen_op_load_fpr_QT0(QFPREG(rs1));
2198 gen_op_load_fpr_QT1(QFPREG(rs2));
2199 gen_clear_float_exceptions();
2200 gen_helper_faddq();
2201 gen_helper_check_ieee_exceptions();
2202 gen_op_store_QT0_fpr(QFPREG(rd));
2203 break;
2204 case 0x45: /* fsubs */
2205 gen_clear_float_exceptions();
2206 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2207 gen_helper_check_ieee_exceptions();
2208 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2209 break;
2210 case 0x46: /* fsubd */
2211 gen_op_load_fpr_DT0(DFPREG(rs1));
2212 gen_op_load_fpr_DT1(DFPREG(rs2));
2213 gen_clear_float_exceptions();
2214 gen_helper_fsubd();
2215 gen_helper_check_ieee_exceptions();
2216 gen_op_store_DT0_fpr(DFPREG(rd));
2217 break;
2218 case 0x47: /* fsubq */
2219 CHECK_FPU_FEATURE(dc, FLOAT128);
2220 gen_op_load_fpr_QT0(QFPREG(rs1));
2221 gen_op_load_fpr_QT1(QFPREG(rs2));
2222 gen_clear_float_exceptions();
2223 gen_helper_fsubq();
2224 gen_helper_check_ieee_exceptions();
2225 gen_op_store_QT0_fpr(QFPREG(rd));
2226 break;
2227 case 0x49: /* fmuls */
2228 CHECK_FPU_FEATURE(dc, FMUL);
2229 gen_clear_float_exceptions();
2230 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2231 gen_helper_check_ieee_exceptions();
2232 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2233 break;
2234 case 0x4a: /* fmuld */
2235 CHECK_FPU_FEATURE(dc, FMUL);
2236 gen_op_load_fpr_DT0(DFPREG(rs1));
2237 gen_op_load_fpr_DT1(DFPREG(rs2));
2238 gen_clear_float_exceptions();
2239 gen_helper_fmuld();
2240 gen_helper_check_ieee_exceptions();
2241 gen_op_store_DT0_fpr(DFPREG(rd));
2242 break;
2243 case 0x4b: /* fmulq */
2244 CHECK_FPU_FEATURE(dc, FLOAT128);
2245 CHECK_FPU_FEATURE(dc, FMUL);
2246 gen_op_load_fpr_QT0(QFPREG(rs1));
2247 gen_op_load_fpr_QT1(QFPREG(rs2));
2248 gen_clear_float_exceptions();
2249 gen_helper_fmulq();
2250 gen_helper_check_ieee_exceptions();
2251 gen_op_store_QT0_fpr(QFPREG(rd));
2252 break;
2253 case 0x4d: /* fdivs */
2254 gen_clear_float_exceptions();
2255 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2256 gen_helper_check_ieee_exceptions();
2257 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2258 break;
2259 case 0x4e: /* fdivd */
2260 gen_op_load_fpr_DT0(DFPREG(rs1));
2261 gen_op_load_fpr_DT1(DFPREG(rs2));
2262 gen_clear_float_exceptions();
2263 gen_helper_fdivd();
2264 gen_helper_check_ieee_exceptions();
2265 gen_op_store_DT0_fpr(DFPREG(rd));
2266 break;
2267 case 0x4f: /* fdivq */
2268 CHECK_FPU_FEATURE(dc, FLOAT128);
2269 gen_op_load_fpr_QT0(QFPREG(rs1));
2270 gen_op_load_fpr_QT1(QFPREG(rs2));
2271 gen_clear_float_exceptions();
2272 gen_helper_fdivq();
2273 gen_helper_check_ieee_exceptions();
2274 gen_op_store_QT0_fpr(QFPREG(rd));
2275 break;
2276 case 0x69: /* fsmuld */
2277 CHECK_FPU_FEATURE(dc, FSMULD);
2278 gen_clear_float_exceptions();
2279 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2280 gen_helper_check_ieee_exceptions();
2281 gen_op_store_DT0_fpr(DFPREG(rd));
2282 break;
2283 case 0x6e: /* fdmulq */
2284 CHECK_FPU_FEATURE(dc, FLOAT128);
2285 gen_op_load_fpr_DT0(DFPREG(rs1));
2286 gen_op_load_fpr_DT1(DFPREG(rs2));
2287 gen_clear_float_exceptions();
2288 gen_helper_fdmulq();
2289 gen_helper_check_ieee_exceptions();
2290 gen_op_store_QT0_fpr(QFPREG(rd));
2291 break;
2292 case 0xc4: /* fitos */
2293 gen_clear_float_exceptions();
2294 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2295 gen_helper_check_ieee_exceptions();
2296 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2297 break;
2298 case 0xc6: /* fdtos */
2299 gen_op_load_fpr_DT1(DFPREG(rs2));
2300 gen_clear_float_exceptions();
2301 gen_helper_fdtos(cpu_tmp32);
2302 gen_helper_check_ieee_exceptions();
2303 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2304 break;
2305 case 0xc7: /* fqtos */
2306 CHECK_FPU_FEATURE(dc, FLOAT128);
2307 gen_op_load_fpr_QT1(QFPREG(rs2));
2308 gen_clear_float_exceptions();
2309 gen_helper_fqtos(cpu_tmp32);
2310 gen_helper_check_ieee_exceptions();
2311 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2312 break;
2313 case 0xc8: /* fitod */
2314 gen_helper_fitod(cpu_fpr[rs2]);
2315 gen_op_store_DT0_fpr(DFPREG(rd));
2316 break;
2317 case 0xc9: /* fstod */
2318 gen_helper_fstod(cpu_fpr[rs2]);
2319 gen_op_store_DT0_fpr(DFPREG(rd));
2320 break;
2321 case 0xcb: /* fqtod */
2322 CHECK_FPU_FEATURE(dc, FLOAT128);
2323 gen_op_load_fpr_QT1(QFPREG(rs2));
2324 gen_clear_float_exceptions();
2325 gen_helper_fqtod();
2326 gen_helper_check_ieee_exceptions();
2327 gen_op_store_DT0_fpr(DFPREG(rd));
2328 break;
2329 case 0xcc: /* fitoq */
2330 CHECK_FPU_FEATURE(dc, FLOAT128);
2331 gen_helper_fitoq(cpu_fpr[rs2]);
2332 gen_op_store_QT0_fpr(QFPREG(rd));
2333 break;
2334 case 0xcd: /* fstoq */
2335 CHECK_FPU_FEATURE(dc, FLOAT128);
2336 gen_helper_fstoq(cpu_fpr[rs2]);
2337 gen_op_store_QT0_fpr(QFPREG(rd));
2338 break;
2339 case 0xce: /* fdtoq */
2340 CHECK_FPU_FEATURE(dc, FLOAT128);
2341 gen_op_load_fpr_DT1(DFPREG(rs2));
2342 gen_helper_fdtoq();
2343 gen_op_store_QT0_fpr(QFPREG(rd));
2344 break;
2345 case 0xd1: /* fstoi */
2346 gen_clear_float_exceptions();
2347 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2348 gen_helper_check_ieee_exceptions();
2349 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2350 break;
2351 case 0xd2: /* fdtoi */
2352 gen_op_load_fpr_DT1(DFPREG(rs2));
2353 gen_clear_float_exceptions();
2354 gen_helper_fdtoi(cpu_tmp32);
2355 gen_helper_check_ieee_exceptions();
2356 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2357 break;
2358 case 0xd3: /* fqtoi */
2359 CHECK_FPU_FEATURE(dc, FLOAT128);
2360 gen_op_load_fpr_QT1(QFPREG(rs2));
2361 gen_clear_float_exceptions();
2362 gen_helper_fqtoi(cpu_tmp32);
2363 gen_helper_check_ieee_exceptions();
2364 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2365 break;
2366 #ifdef TARGET_SPARC64
2367 case 0x2: /* V9 fmovd */
2368 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2369 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2370 cpu_fpr[DFPREG(rs2) + 1]);
2371 break;
2372 case 0x3: /* V9 fmovq */
2373 CHECK_FPU_FEATURE(dc, FLOAT128);
2374 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2375 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2376 cpu_fpr[QFPREG(rs2) + 1]);
2377 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2378 cpu_fpr[QFPREG(rs2) + 2]);
2379 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2380 cpu_fpr[QFPREG(rs2) + 3]);
2381 break;
2382 case 0x6: /* V9 fnegd */
2383 gen_op_load_fpr_DT1(DFPREG(rs2));
2384 gen_helper_fnegd();
2385 gen_op_store_DT0_fpr(DFPREG(rd));
2386 break;
2387 case 0x7: /* V9 fnegq */
2388 CHECK_FPU_FEATURE(dc, FLOAT128);
2389 gen_op_load_fpr_QT1(QFPREG(rs2));
2390 gen_helper_fnegq();
2391 gen_op_store_QT0_fpr(QFPREG(rd));
2392 break;
2393 case 0xa: /* V9 fabsd */
2394 gen_op_load_fpr_DT1(DFPREG(rs2));
2395 gen_helper_fabsd();
2396 gen_op_store_DT0_fpr(DFPREG(rd));
2397 break;
2398 case 0xb: /* V9 fabsq */
2399 CHECK_FPU_FEATURE(dc, FLOAT128);
2400 gen_op_load_fpr_QT1(QFPREG(rs2));
2401 gen_helper_fabsq();
2402 gen_op_store_QT0_fpr(QFPREG(rd));
2403 break;
2404 case 0x81: /* V9 fstox */
2405 gen_clear_float_exceptions();
2406 gen_helper_fstox(cpu_fpr[rs2]);
2407 gen_helper_check_ieee_exceptions();
2408 gen_op_store_DT0_fpr(DFPREG(rd));
2409 break;
2410 case 0x82: /* V9 fdtox */
2411 gen_op_load_fpr_DT1(DFPREG(rs2));
2412 gen_clear_float_exceptions();
2413 gen_helper_fdtox();
2414 gen_helper_check_ieee_exceptions();
2415 gen_op_store_DT0_fpr(DFPREG(rd));
2416 break;
2417 case 0x83: /* V9 fqtox */
2418 CHECK_FPU_FEATURE(dc, FLOAT128);
2419 gen_op_load_fpr_QT1(QFPREG(rs2));
2420 gen_clear_float_exceptions();
2421 gen_helper_fqtox();
2422 gen_helper_check_ieee_exceptions();
2423 gen_op_store_DT0_fpr(DFPREG(rd));
2424 break;
2425 case 0x84: /* V9 fxtos */
2426 gen_op_load_fpr_DT1(DFPREG(rs2));
2427 gen_clear_float_exceptions();
2428 gen_helper_fxtos(cpu_tmp32);
2429 gen_helper_check_ieee_exceptions();
2430 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2431 break;
2432 case 0x88: /* V9 fxtod */
2433 gen_op_load_fpr_DT1(DFPREG(rs2));
2434 gen_clear_float_exceptions();
2435 gen_helper_fxtod();
2436 gen_helper_check_ieee_exceptions();
2437 gen_op_store_DT0_fpr(DFPREG(rd));
2438 break;
2439 case 0x8c: /* V9 fxtoq */
2440 CHECK_FPU_FEATURE(dc, FLOAT128);
2441 gen_op_load_fpr_DT1(DFPREG(rs2));
2442 gen_clear_float_exceptions();
2443 gen_helper_fxtoq();
2444 gen_helper_check_ieee_exceptions();
2445 gen_op_store_QT0_fpr(QFPREG(rd));
2446 break;
2447 #endif
2448 default:
2449 goto illegal_insn;
2451 } else if (xop == 0x35) { /* FPU Operations */
2452 #ifdef TARGET_SPARC64
2453 int cond;
2454 #endif
2455 if (gen_trap_ifnofpu(dc, cpu_cond))
2456 goto jmp_insn;
2457 gen_op_clear_ieee_excp_and_FTT();
2458 rs1 = GET_FIELD(insn, 13, 17);
2459 rs2 = GET_FIELD(insn, 27, 31);
2460 xop = GET_FIELD(insn, 18, 26);
2461 #ifdef TARGET_SPARC64
2462 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2463 int l1;
2465 l1 = gen_new_label();
2466 cond = GET_FIELD_SP(insn, 14, 17);
2467 cpu_src1 = get_src1(insn, cpu_src1);
2468 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2469 0, l1);
2470 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2471 gen_set_label(l1);
2472 break;
2473 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2474 int l1;
2476 l1 = gen_new_label();
2477 cond = GET_FIELD_SP(insn, 14, 17);
2478 cpu_src1 = get_src1(insn, cpu_src1);
2479 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2480 0, l1);
2481 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2482 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2483 gen_set_label(l1);
2484 break;
2485 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2486 int l1;
2488 CHECK_FPU_FEATURE(dc, FLOAT128);
2489 l1 = gen_new_label();
2490 cond = GET_FIELD_SP(insn, 14, 17);
2491 cpu_src1 = get_src1(insn, cpu_src1);
2492 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2493 0, l1);
2494 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2495 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2496 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2497 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2498 gen_set_label(l1);
2499 break;
2501 #endif
2502 switch (xop) {
2503 #ifdef TARGET_SPARC64
2504 #define FMOVSCC(fcc) \
2506 TCGv r_cond; \
2507 int l1; \
2509 l1 = gen_new_label(); \
2510 r_cond = tcg_temp_new(); \
2511 cond = GET_FIELD_SP(insn, 14, 17); \
2512 gen_fcond(r_cond, fcc, cond); \
2513 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2514 0, l1); \
2515 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2516 gen_set_label(l1); \
2517 tcg_temp_free(r_cond); \
2519 #define FMOVDCC(fcc) \
2521 TCGv r_cond; \
2522 int l1; \
2524 l1 = gen_new_label(); \
2525 r_cond = tcg_temp_new(); \
2526 cond = GET_FIELD_SP(insn, 14, 17); \
2527 gen_fcond(r_cond, fcc, cond); \
2528 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2529 0, l1); \
2530 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2531 cpu_fpr[DFPREG(rs2)]); \
2532 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2533 cpu_fpr[DFPREG(rs2) + 1]); \
2534 gen_set_label(l1); \
2535 tcg_temp_free(r_cond); \
2537 #define FMOVQCC(fcc) \
2539 TCGv r_cond; \
2540 int l1; \
2542 l1 = gen_new_label(); \
2543 r_cond = tcg_temp_new(); \
2544 cond = GET_FIELD_SP(insn, 14, 17); \
2545 gen_fcond(r_cond, fcc, cond); \
2546 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2547 0, l1); \
2548 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2549 cpu_fpr[QFPREG(rs2)]); \
2550 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2551 cpu_fpr[QFPREG(rs2) + 1]); \
2552 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2553 cpu_fpr[QFPREG(rs2) + 2]); \
2554 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2555 cpu_fpr[QFPREG(rs2) + 3]); \
2556 gen_set_label(l1); \
2557 tcg_temp_free(r_cond); \
2559 case 0x001: /* V9 fmovscc %fcc0 */
2560 FMOVSCC(0);
2561 break;
2562 case 0x002: /* V9 fmovdcc %fcc0 */
2563 FMOVDCC(0);
2564 break;
2565 case 0x003: /* V9 fmovqcc %fcc0 */
2566 CHECK_FPU_FEATURE(dc, FLOAT128);
2567 FMOVQCC(0);
2568 break;
2569 case 0x041: /* V9 fmovscc %fcc1 */
2570 FMOVSCC(1);
2571 break;
2572 case 0x042: /* V9 fmovdcc %fcc1 */
2573 FMOVDCC(1);
2574 break;
2575 case 0x043: /* V9 fmovqcc %fcc1 */
2576 CHECK_FPU_FEATURE(dc, FLOAT128);
2577 FMOVQCC(1);
2578 break;
2579 case 0x081: /* V9 fmovscc %fcc2 */
2580 FMOVSCC(2);
2581 break;
2582 case 0x082: /* V9 fmovdcc %fcc2 */
2583 FMOVDCC(2);
2584 break;
2585 case 0x083: /* V9 fmovqcc %fcc2 */
2586 CHECK_FPU_FEATURE(dc, FLOAT128);
2587 FMOVQCC(2);
2588 break;
2589 case 0x0c1: /* V9 fmovscc %fcc3 */
2590 FMOVSCC(3);
2591 break;
2592 case 0x0c2: /* V9 fmovdcc %fcc3 */
2593 FMOVDCC(3);
2594 break;
2595 case 0x0c3: /* V9 fmovqcc %fcc3 */
2596 CHECK_FPU_FEATURE(dc, FLOAT128);
2597 FMOVQCC(3);
2598 break;
2599 #undef FMOVSCC
2600 #undef FMOVDCC
2601 #undef FMOVQCC
2602 #define FMOVSCC(icc) \
2604 TCGv r_cond; \
2605 int l1; \
2607 l1 = gen_new_label(); \
2608 r_cond = tcg_temp_new(); \
2609 cond = GET_FIELD_SP(insn, 14, 17); \
2610 gen_cond(r_cond, icc, cond, dc); \
2611 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2612 0, l1); \
2613 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2614 gen_set_label(l1); \
2615 tcg_temp_free(r_cond); \
2617 #define FMOVDCC(icc) \
2619 TCGv r_cond; \
2620 int l1; \
2622 l1 = gen_new_label(); \
2623 r_cond = tcg_temp_new(); \
2624 cond = GET_FIELD_SP(insn, 14, 17); \
2625 gen_cond(r_cond, icc, cond, dc); \
2626 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2627 0, l1); \
2628 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2629 cpu_fpr[DFPREG(rs2)]); \
2630 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2631 cpu_fpr[DFPREG(rs2) + 1]); \
2632 gen_set_label(l1); \
2633 tcg_temp_free(r_cond); \
2635 #define FMOVQCC(icc) \
2637 TCGv r_cond; \
2638 int l1; \
2640 l1 = gen_new_label(); \
2641 r_cond = tcg_temp_new(); \
2642 cond = GET_FIELD_SP(insn, 14, 17); \
2643 gen_cond(r_cond, icc, cond, dc); \
2644 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2645 0, l1); \
2646 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2647 cpu_fpr[QFPREG(rs2)]); \
2648 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2649 cpu_fpr[QFPREG(rs2) + 1]); \
2650 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2651 cpu_fpr[QFPREG(rs2) + 2]); \
2652 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2653 cpu_fpr[QFPREG(rs2) + 3]); \
2654 gen_set_label(l1); \
2655 tcg_temp_free(r_cond); \
2658 case 0x101: /* V9 fmovscc %icc */
2659 FMOVSCC(0);
2660 break;
2661 case 0x102: /* V9 fmovdcc %icc */
2662 FMOVDCC(0);
2663 case 0x103: /* V9 fmovqcc %icc */
2664 CHECK_FPU_FEATURE(dc, FLOAT128);
2665 FMOVQCC(0);
2666 break;
2667 case 0x181: /* V9 fmovscc %xcc */
2668 FMOVSCC(1);
2669 break;
2670 case 0x182: /* V9 fmovdcc %xcc */
2671 FMOVDCC(1);
2672 break;
2673 case 0x183: /* V9 fmovqcc %xcc */
2674 CHECK_FPU_FEATURE(dc, FLOAT128);
2675 FMOVQCC(1);
2676 break;
2677 #undef FMOVSCC
2678 #undef FMOVDCC
2679 #undef FMOVQCC
2680 #endif
2681 case 0x51: /* fcmps, V9 %fcc */
2682 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2683 break;
2684 case 0x52: /* fcmpd, V9 %fcc */
2685 gen_op_load_fpr_DT0(DFPREG(rs1));
2686 gen_op_load_fpr_DT1(DFPREG(rs2));
2687 gen_op_fcmpd(rd & 3);
2688 break;
2689 case 0x53: /* fcmpq, V9 %fcc */
2690 CHECK_FPU_FEATURE(dc, FLOAT128);
2691 gen_op_load_fpr_QT0(QFPREG(rs1));
2692 gen_op_load_fpr_QT1(QFPREG(rs2));
2693 gen_op_fcmpq(rd & 3);
2694 break;
2695 case 0x55: /* fcmpes, V9 %fcc */
2696 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2697 break;
2698 case 0x56: /* fcmped, V9 %fcc */
2699 gen_op_load_fpr_DT0(DFPREG(rs1));
2700 gen_op_load_fpr_DT1(DFPREG(rs2));
2701 gen_op_fcmped(rd & 3);
2702 break;
2703 case 0x57: /* fcmpeq, V9 %fcc */
2704 CHECK_FPU_FEATURE(dc, FLOAT128);
2705 gen_op_load_fpr_QT0(QFPREG(rs1));
2706 gen_op_load_fpr_QT1(QFPREG(rs2));
2707 gen_op_fcmpeq(rd & 3);
2708 break;
2709 default:
2710 goto illegal_insn;
2712 } else if (xop == 0x2) {
2713 // clr/mov shortcut
2715 rs1 = GET_FIELD(insn, 13, 17);
2716 if (rs1 == 0) {
2717 // or %g0, x, y -> mov T0, x; mov y, T0
2718 if (IS_IMM) { /* immediate */
2719 TCGv r_const;
2721 simm = GET_FIELDs(insn, 19, 31);
2722 r_const = tcg_const_tl(simm);
2723 gen_movl_TN_reg(rd, r_const);
2724 tcg_temp_free(r_const);
2725 } else { /* register */
2726 rs2 = GET_FIELD(insn, 27, 31);
2727 gen_movl_reg_TN(rs2, cpu_dst);
2728 gen_movl_TN_reg(rd, cpu_dst);
2730 } else {
2731 cpu_src1 = get_src1(insn, cpu_src1);
2732 if (IS_IMM) { /* immediate */
2733 simm = GET_FIELDs(insn, 19, 31);
2734 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2735 gen_movl_TN_reg(rd, cpu_dst);
2736 } else { /* register */
2737 // or x, %g0, y -> mov T1, x; mov y, T1
2738 rs2 = GET_FIELD(insn, 27, 31);
2739 if (rs2 != 0) {
2740 gen_movl_reg_TN(rs2, cpu_src2);
2741 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2742 gen_movl_TN_reg(rd, cpu_dst);
2743 } else
2744 gen_movl_TN_reg(rd, cpu_src1);
2747 #ifdef TARGET_SPARC64
2748 } else if (xop == 0x25) { /* sll, V9 sllx */
2749 cpu_src1 = get_src1(insn, cpu_src1);
2750 if (IS_IMM) { /* immediate */
2751 simm = GET_FIELDs(insn, 20, 31);
2752 if (insn & (1 << 12)) {
2753 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2754 } else {
2755 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2757 } else { /* register */
2758 rs2 = GET_FIELD(insn, 27, 31);
2759 gen_movl_reg_TN(rs2, cpu_src2);
2760 if (insn & (1 << 12)) {
2761 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2762 } else {
2763 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2765 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2767 gen_movl_TN_reg(rd, cpu_dst);
2768 } else if (xop == 0x26) { /* srl, V9 srlx */
2769 cpu_src1 = get_src1(insn, cpu_src1);
2770 if (IS_IMM) { /* immediate */
2771 simm = GET_FIELDs(insn, 20, 31);
2772 if (insn & (1 << 12)) {
2773 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2774 } else {
2775 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2776 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2778 } else { /* register */
2779 rs2 = GET_FIELD(insn, 27, 31);
2780 gen_movl_reg_TN(rs2, cpu_src2);
2781 if (insn & (1 << 12)) {
2782 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2783 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2784 } else {
2785 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2786 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2787 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2790 gen_movl_TN_reg(rd, cpu_dst);
2791 } else if (xop == 0x27) { /* sra, V9 srax */
2792 cpu_src1 = get_src1(insn, cpu_src1);
2793 if (IS_IMM) { /* immediate */
2794 simm = GET_FIELDs(insn, 20, 31);
2795 if (insn & (1 << 12)) {
2796 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2797 } else {
2798 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2799 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2800 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2802 } else { /* register */
2803 rs2 = GET_FIELD(insn, 27, 31);
2804 gen_movl_reg_TN(rs2, cpu_src2);
2805 if (insn & (1 << 12)) {
2806 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2807 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2808 } else {
2809 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2810 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2811 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2812 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2815 gen_movl_TN_reg(rd, cpu_dst);
2816 #endif
2817 } else if (xop < 0x36) {
2818 if (xop < 0x20) {
2819 cpu_src1 = get_src1(insn, cpu_src1);
2820 cpu_src2 = get_src2(insn, cpu_src2);
2821 switch (xop & ~0x10) {
2822 case 0x0: /* add */
2823 if (IS_IMM) {
2824 simm = GET_FIELDs(insn, 19, 31);
2825 if (xop & 0x10) {
2826 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2827 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2828 dc->cc_op = CC_OP_ADD;
2829 } else {
2830 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2832 } else {
2833 if (xop & 0x10) {
2834 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2835 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2836 dc->cc_op = CC_OP_ADD;
2837 } else {
2838 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2841 break;
2842 case 0x1: /* and */
2843 if (IS_IMM) {
2844 simm = GET_FIELDs(insn, 19, 31);
2845 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2846 } else {
2847 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2849 if (xop & 0x10) {
2850 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2851 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2852 dc->cc_op = CC_OP_LOGIC;
2854 break;
2855 case 0x2: /* or */
2856 if (IS_IMM) {
2857 simm = GET_FIELDs(insn, 19, 31);
2858 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2859 } else {
2860 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2862 if (xop & 0x10) {
2863 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2864 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2865 dc->cc_op = CC_OP_LOGIC;
2867 break;
2868 case 0x3: /* xor */
2869 if (IS_IMM) {
2870 simm = GET_FIELDs(insn, 19, 31);
2871 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2872 } else {
2873 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2875 if (xop & 0x10) {
2876 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2877 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2878 dc->cc_op = CC_OP_LOGIC;
2880 break;
2881 case 0x4: /* sub */
2882 if (IS_IMM) {
2883 simm = GET_FIELDs(insn, 19, 31);
2884 if (xop & 0x10) {
2885 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2886 } else {
2887 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2889 } else {
2890 if (xop & 0x10) {
2891 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2892 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2893 dc->cc_op = CC_OP_SUB;
2894 } else {
2895 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2898 break;
2899 case 0x5: /* andn */
2900 if (IS_IMM) {
2901 simm = GET_FIELDs(insn, 19, 31);
2902 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2903 } else {
2904 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2906 if (xop & 0x10) {
2907 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2908 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2909 dc->cc_op = CC_OP_LOGIC;
2911 break;
2912 case 0x6: /* orn */
2913 if (IS_IMM) {
2914 simm = GET_FIELDs(insn, 19, 31);
2915 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2916 } else {
2917 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2919 if (xop & 0x10) {
2920 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2921 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2922 dc->cc_op = CC_OP_LOGIC;
2924 break;
2925 case 0x7: /* xorn */
2926 if (IS_IMM) {
2927 simm = GET_FIELDs(insn, 19, 31);
2928 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2929 } else {
2930 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2931 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2933 if (xop & 0x10) {
2934 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2935 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2936 dc->cc_op = CC_OP_LOGIC;
2938 break;
2939 case 0x8: /* addx, V9 addc */
2940 if (IS_IMM) {
2941 simm = GET_FIELDs(insn, 19, 31);
2942 if (xop & 0x10) {
2943 gen_helper_compute_psr();
2944 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2945 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2946 dc->cc_op = CC_OP_ADDX;
2947 } else {
2948 gen_helper_compute_psr();
2949 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2950 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2951 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2953 } else {
2954 if (xop & 0x10) {
2955 gen_helper_compute_psr();
2956 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2957 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2958 dc->cc_op = CC_OP_ADDX;
2959 } else {
2960 gen_helper_compute_psr();
2961 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2962 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2963 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2966 break;
2967 #ifdef TARGET_SPARC64
2968 case 0x9: /* V9 mulx */
2969 if (IS_IMM) {
2970 simm = GET_FIELDs(insn, 19, 31);
2971 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2972 } else {
2973 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2975 break;
2976 #endif
2977 case 0xa: /* umul */
2978 CHECK_IU_FEATURE(dc, MUL);
2979 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2980 if (xop & 0x10) {
2981 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2982 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2983 dc->cc_op = CC_OP_LOGIC;
2985 break;
2986 case 0xb: /* smul */
2987 CHECK_IU_FEATURE(dc, MUL);
2988 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2989 if (xop & 0x10) {
2990 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2991 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2992 dc->cc_op = CC_OP_LOGIC;
2994 break;
2995 case 0xc: /* subx, V9 subc */
2996 if (IS_IMM) {
2997 simm = GET_FIELDs(insn, 19, 31);
2998 if (xop & 0x10) {
2999 gen_helper_compute_psr();
3000 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3001 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3002 dc->cc_op = CC_OP_SUBX;
3003 } else {
3004 gen_helper_compute_psr();
3005 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3006 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3007 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3009 } else {
3010 if (xop & 0x10) {
3011 gen_helper_compute_psr();
3012 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3013 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3014 dc->cc_op = CC_OP_SUBX;
3015 } else {
3016 gen_helper_compute_psr();
3017 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3018 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3019 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3022 break;
3023 #ifdef TARGET_SPARC64
3024 case 0xd: /* V9 udivx */
3025 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3026 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3027 gen_trap_ifdivzero_tl(cpu_cc_src2);
3028 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3029 break;
3030 #endif
3031 case 0xe: /* udiv */
3032 CHECK_IU_FEATURE(dc, DIV);
3033 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3034 if (xop & 0x10) {
3035 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3036 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3037 dc->cc_op = CC_OP_DIV;
3039 break;
3040 case 0xf: /* sdiv */
3041 CHECK_IU_FEATURE(dc, DIV);
3042 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3043 if (xop & 0x10) {
3044 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3045 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3046 dc->cc_op = CC_OP_DIV;
3048 break;
3049 default:
3050 goto illegal_insn;
3052 gen_movl_TN_reg(rd, cpu_dst);
3053 } else {
3054 cpu_src1 = get_src1(insn, cpu_src1);
3055 cpu_src2 = get_src2(insn, cpu_src2);
3056 switch (xop) {
3057 case 0x20: /* taddcc */
3058 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3059 gen_movl_TN_reg(rd, cpu_dst);
3060 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3061 dc->cc_op = CC_OP_TADD;
3062 break;
3063 case 0x21: /* tsubcc */
3064 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3065 gen_movl_TN_reg(rd, cpu_dst);
3066 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3067 dc->cc_op = CC_OP_TSUB;
3068 break;
3069 case 0x22: /* taddcctv */
3070 save_state(dc, cpu_cond);
3071 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3072 gen_movl_TN_reg(rd, cpu_dst);
3073 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3074 dc->cc_op = CC_OP_TADDTV;
3075 break;
3076 case 0x23: /* tsubcctv */
3077 save_state(dc, cpu_cond);
3078 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3079 gen_movl_TN_reg(rd, cpu_dst);
3080 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3081 dc->cc_op = CC_OP_TSUBTV;
3082 break;
3083 case 0x24: /* mulscc */
3084 gen_helper_compute_psr();
3085 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3086 gen_movl_TN_reg(rd, cpu_dst);
3087 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3088 dc->cc_op = CC_OP_ADD;
3089 break;
3090 #ifndef TARGET_SPARC64
3091 case 0x25: /* sll */
3092 if (IS_IMM) { /* immediate */
3093 simm = GET_FIELDs(insn, 20, 31);
3094 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3095 } else { /* register */
3096 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3097 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3099 gen_movl_TN_reg(rd, cpu_dst);
3100 break;
3101 case 0x26: /* srl */
3102 if (IS_IMM) { /* immediate */
3103 simm = GET_FIELDs(insn, 20, 31);
3104 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3105 } else { /* register */
3106 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3107 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3109 gen_movl_TN_reg(rd, cpu_dst);
3110 break;
3111 case 0x27: /* sra */
3112 if (IS_IMM) { /* immediate */
3113 simm = GET_FIELDs(insn, 20, 31);
3114 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3115 } else { /* register */
3116 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3117 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3119 gen_movl_TN_reg(rd, cpu_dst);
3120 break;
3121 #endif
3122 case 0x30:
3124 switch(rd) {
3125 case 0: /* wry */
3126 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3127 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3128 break;
3129 #ifndef TARGET_SPARC64
3130 case 0x01 ... 0x0f: /* undefined in the
3131 SPARCv8 manual, nop
3132 on the microSPARC
3133 II */
3134 case 0x10 ... 0x1f: /* implementation-dependent
3135 in the SPARCv8
3136 manual, nop on the
3137 microSPARC II */
3138 break;
3139 #else
3140 case 0x2: /* V9 wrccr */
3141 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3142 gen_helper_wrccr(cpu_dst);
3143 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3144 dc->cc_op = CC_OP_FLAGS;
3145 break;
3146 case 0x3: /* V9 wrasi */
3147 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3148 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3149 break;
3150 case 0x6: /* V9 wrfprs */
3151 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3152 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3153 save_state(dc, cpu_cond);
3154 gen_op_next_insn();
3155 tcg_gen_exit_tb(0);
3156 dc->is_br = 1;
3157 break;
3158 case 0xf: /* V9 sir, nop if user */
3159 #if !defined(CONFIG_USER_ONLY)
3160 if (supervisor(dc))
3161 ; // XXX
3162 #endif
3163 break;
3164 case 0x13: /* Graphics Status */
3165 if (gen_trap_ifnofpu(dc, cpu_cond))
3166 goto jmp_insn;
3167 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3168 break;
3169 case 0x14: /* Softint set */
3170 if (!supervisor(dc))
3171 goto illegal_insn;
3172 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3173 gen_helper_set_softint(cpu_tmp64);
3174 break;
3175 case 0x15: /* Softint clear */
3176 if (!supervisor(dc))
3177 goto illegal_insn;
3178 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3179 gen_helper_clear_softint(cpu_tmp64);
3180 break;
3181 case 0x16: /* Softint write */
3182 if (!supervisor(dc))
3183 goto illegal_insn;
3184 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3185 gen_helper_write_softint(cpu_tmp64);
3186 break;
3187 case 0x17: /* Tick compare */
3188 #if !defined(CONFIG_USER_ONLY)
3189 if (!supervisor(dc))
3190 goto illegal_insn;
3191 #endif
3193 TCGv_ptr r_tickptr;
3195 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3196 cpu_src2);
3197 r_tickptr = tcg_temp_new_ptr();
3198 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3199 offsetof(CPUState, tick));
3200 gen_helper_tick_set_limit(r_tickptr,
3201 cpu_tick_cmpr);
3202 tcg_temp_free_ptr(r_tickptr);
3204 break;
3205 case 0x18: /* System tick */
3206 #if !defined(CONFIG_USER_ONLY)
3207 if (!supervisor(dc))
3208 goto illegal_insn;
3209 #endif
3211 TCGv_ptr r_tickptr;
3213 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3214 cpu_src2);
3215 r_tickptr = tcg_temp_new_ptr();
3216 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3217 offsetof(CPUState, stick));
3218 gen_helper_tick_set_count(r_tickptr,
3219 cpu_dst);
3220 tcg_temp_free_ptr(r_tickptr);
3222 break;
3223 case 0x19: /* System tick compare */
3224 #if !defined(CONFIG_USER_ONLY)
3225 if (!supervisor(dc))
3226 goto illegal_insn;
3227 #endif
3229 TCGv_ptr r_tickptr;
3231 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3232 cpu_src2);
3233 r_tickptr = tcg_temp_new_ptr();
3234 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3235 offsetof(CPUState, stick));
3236 gen_helper_tick_set_limit(r_tickptr,
3237 cpu_stick_cmpr);
3238 tcg_temp_free_ptr(r_tickptr);
3240 break;
3242 case 0x10: /* Performance Control */
3243 case 0x11: /* Performance Instrumentation
3244 Counter */
3245 case 0x12: /* Dispatch Control */
3246 #endif
3247 default:
3248 goto illegal_insn;
3251 break;
3252 #if !defined(CONFIG_USER_ONLY)
3253 case 0x31: /* wrpsr, V9 saved, restored */
3255 if (!supervisor(dc))
3256 goto priv_insn;
3257 #ifdef TARGET_SPARC64
3258 switch (rd) {
3259 case 0:
3260 gen_helper_saved();
3261 break;
3262 case 1:
3263 gen_helper_restored();
3264 break;
3265 case 2: /* UA2005 allclean */
3266 case 3: /* UA2005 otherw */
3267 case 4: /* UA2005 normalw */
3268 case 5: /* UA2005 invalw */
3269 // XXX
3270 default:
3271 goto illegal_insn;
3273 #else
3274 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3275 gen_helper_wrpsr(cpu_dst);
3276 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3277 dc->cc_op = CC_OP_FLAGS;
3278 save_state(dc, cpu_cond);
3279 gen_op_next_insn();
3280 tcg_gen_exit_tb(0);
3281 dc->is_br = 1;
3282 #endif
3284 break;
3285 case 0x32: /* wrwim, V9 wrpr */
3287 if (!supervisor(dc))
3288 goto priv_insn;
3289 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3290 #ifdef TARGET_SPARC64
3291 switch (rd) {
3292 case 0: // tpc
3294 TCGv_ptr r_tsptr;
3296 r_tsptr = tcg_temp_new_ptr();
3297 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3298 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3299 offsetof(trap_state, tpc));
3300 tcg_temp_free_ptr(r_tsptr);
3302 break;
3303 case 1: // tnpc
3305 TCGv_ptr r_tsptr;
3307 r_tsptr = tcg_temp_new_ptr();
3308 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3309 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3310 offsetof(trap_state, tnpc));
3311 tcg_temp_free_ptr(r_tsptr);
3313 break;
3314 case 2: // tstate
3316 TCGv_ptr r_tsptr;
3318 r_tsptr = tcg_temp_new_ptr();
3319 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3320 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3321 offsetof(trap_state,
3322 tstate));
3323 tcg_temp_free_ptr(r_tsptr);
3325 break;
3326 case 3: // tt
3328 TCGv_ptr r_tsptr;
3330 r_tsptr = tcg_temp_new_ptr();
3331 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3332 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3333 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3334 offsetof(trap_state, tt));
3335 tcg_temp_free_ptr(r_tsptr);
3337 break;
3338 case 4: // tick
3340 TCGv_ptr r_tickptr;
3342 r_tickptr = tcg_temp_new_ptr();
3343 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3344 offsetof(CPUState, tick));
3345 gen_helper_tick_set_count(r_tickptr,
3346 cpu_tmp0);
3347 tcg_temp_free_ptr(r_tickptr);
3349 break;
3350 case 5: // tba
3351 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3352 break;
3353 case 6: // pstate
3354 save_state(dc, cpu_cond);
3355 gen_helper_wrpstate(cpu_tmp0);
3356 gen_op_next_insn();
3357 tcg_gen_exit_tb(0);
3358 dc->is_br = 1;
3359 break;
3360 case 7: // tl
3361 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3362 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3363 offsetof(CPUSPARCState, tl));
3364 break;
3365 case 8: // pil
3366 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3367 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3368 offsetof(CPUSPARCState,
3369 psrpil));
3370 break;
3371 case 9: // cwp
3372 gen_helper_wrcwp(cpu_tmp0);
3373 break;
3374 case 10: // cansave
3375 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3376 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3377 offsetof(CPUSPARCState,
3378 cansave));
3379 break;
3380 case 11: // canrestore
3381 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3382 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3383 offsetof(CPUSPARCState,
3384 canrestore));
3385 break;
3386 case 12: // cleanwin
3387 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3388 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3389 offsetof(CPUSPARCState,
3390 cleanwin));
3391 break;
3392 case 13: // otherwin
3393 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3394 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3395 offsetof(CPUSPARCState,
3396 otherwin));
3397 break;
3398 case 14: // wstate
3399 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3400 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3401 offsetof(CPUSPARCState,
3402 wstate));
3403 break;
3404 case 16: // UA2005 gl
3405 CHECK_IU_FEATURE(dc, GL);
3406 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3407 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3408 offsetof(CPUSPARCState, gl));
3409 break;
3410 case 26: // UA2005 strand status
3411 CHECK_IU_FEATURE(dc, HYPV);
3412 if (!hypervisor(dc))
3413 goto priv_insn;
3414 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3415 break;
3416 default:
3417 goto illegal_insn;
3419 #else
3420 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3421 if (dc->def->nwindows != 32)
3422 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3423 (1 << dc->def->nwindows) - 1);
3424 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3425 #endif
3427 break;
3428 case 0x33: /* wrtbr, UA2005 wrhpr */
3430 #ifndef TARGET_SPARC64
3431 if (!supervisor(dc))
3432 goto priv_insn;
3433 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3434 #else
3435 CHECK_IU_FEATURE(dc, HYPV);
3436 if (!hypervisor(dc))
3437 goto priv_insn;
3438 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3439 switch (rd) {
3440 case 0: // hpstate
3441 // XXX gen_op_wrhpstate();
3442 save_state(dc, cpu_cond);
3443 gen_op_next_insn();
3444 tcg_gen_exit_tb(0);
3445 dc->is_br = 1;
3446 break;
3447 case 1: // htstate
3448 // XXX gen_op_wrhtstate();
3449 break;
3450 case 3: // hintp
3451 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3452 break;
3453 case 5: // htba
3454 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3455 break;
3456 case 31: // hstick_cmpr
3458 TCGv_ptr r_tickptr;
3460 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3461 r_tickptr = tcg_temp_new_ptr();
3462 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3463 offsetof(CPUState, hstick));
3464 gen_helper_tick_set_limit(r_tickptr,
3465 cpu_hstick_cmpr);
3466 tcg_temp_free_ptr(r_tickptr);
3468 break;
3469 case 6: // hver readonly
3470 default:
3471 goto illegal_insn;
3473 #endif
3475 break;
3476 #endif
3477 #ifdef TARGET_SPARC64
3478 case 0x2c: /* V9 movcc */
3480 int cc = GET_FIELD_SP(insn, 11, 12);
3481 int cond = GET_FIELD_SP(insn, 14, 17);
3482 TCGv r_cond;
3483 int l1;
3485 r_cond = tcg_temp_new();
3486 if (insn & (1 << 18)) {
3487 if (cc == 0)
3488 gen_cond(r_cond, 0, cond, dc);
3489 else if (cc == 2)
3490 gen_cond(r_cond, 1, cond, dc);
3491 else
3492 goto illegal_insn;
3493 } else {
3494 gen_fcond(r_cond, cc, cond);
3497 l1 = gen_new_label();
3499 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3500 if (IS_IMM) { /* immediate */
3501 TCGv r_const;
3503 simm = GET_FIELD_SPs(insn, 0, 10);
3504 r_const = tcg_const_tl(simm);
3505 gen_movl_TN_reg(rd, r_const);
3506 tcg_temp_free(r_const);
3507 } else {
3508 rs2 = GET_FIELD_SP(insn, 0, 4);
3509 gen_movl_reg_TN(rs2, cpu_tmp0);
3510 gen_movl_TN_reg(rd, cpu_tmp0);
3512 gen_set_label(l1);
3513 tcg_temp_free(r_cond);
3514 break;
3516 case 0x2d: /* V9 sdivx */
3517 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3518 gen_movl_TN_reg(rd, cpu_dst);
3519 break;
3520 case 0x2e: /* V9 popc */
3522 cpu_src2 = get_src2(insn, cpu_src2);
3523 gen_helper_popc(cpu_dst, cpu_src2);
3524 gen_movl_TN_reg(rd, cpu_dst);
3526 case 0x2f: /* V9 movr */
3528 int cond = GET_FIELD_SP(insn, 10, 12);
3529 int l1;
3531 cpu_src1 = get_src1(insn, cpu_src1);
3533 l1 = gen_new_label();
3535 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3536 cpu_src1, 0, l1);
3537 if (IS_IMM) { /* immediate */
3538 TCGv r_const;
3540 simm = GET_FIELD_SPs(insn, 0, 9);
3541 r_const = tcg_const_tl(simm);
3542 gen_movl_TN_reg(rd, r_const);
3543 tcg_temp_free(r_const);
3544 } else {
3545 rs2 = GET_FIELD_SP(insn, 0, 4);
3546 gen_movl_reg_TN(rs2, cpu_tmp0);
3547 gen_movl_TN_reg(rd, cpu_tmp0);
3549 gen_set_label(l1);
3550 break;
3552 #endif
3553 default:
3554 goto illegal_insn;
3557 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3558 #ifdef TARGET_SPARC64
3559 int opf = GET_FIELD_SP(insn, 5, 13);
3560 rs1 = GET_FIELD(insn, 13, 17);
3561 rs2 = GET_FIELD(insn, 27, 31);
3562 if (gen_trap_ifnofpu(dc, cpu_cond))
3563 goto jmp_insn;
3565 switch (opf) {
3566 case 0x000: /* VIS I edge8cc */
3567 case 0x001: /* VIS II edge8n */
3568 case 0x002: /* VIS I edge8lcc */
3569 case 0x003: /* VIS II edge8ln */
3570 case 0x004: /* VIS I edge16cc */
3571 case 0x005: /* VIS II edge16n */
3572 case 0x006: /* VIS I edge16lcc */
3573 case 0x007: /* VIS II edge16ln */
3574 case 0x008: /* VIS I edge32cc */
3575 case 0x009: /* VIS II edge32n */
3576 case 0x00a: /* VIS I edge32lcc */
3577 case 0x00b: /* VIS II edge32ln */
3578 // XXX
3579 goto illegal_insn;
3580 case 0x010: /* VIS I array8 */
3581 CHECK_FPU_FEATURE(dc, VIS1);
3582 cpu_src1 = get_src1(insn, cpu_src1);
3583 gen_movl_reg_TN(rs2, cpu_src2);
3584 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3585 gen_movl_TN_reg(rd, cpu_dst);
3586 break;
3587 case 0x012: /* VIS I array16 */
3588 CHECK_FPU_FEATURE(dc, VIS1);
3589 cpu_src1 = get_src1(insn, cpu_src1);
3590 gen_movl_reg_TN(rs2, cpu_src2);
3591 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3592 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3593 gen_movl_TN_reg(rd, cpu_dst);
3594 break;
3595 case 0x014: /* VIS I array32 */
3596 CHECK_FPU_FEATURE(dc, VIS1);
3597 cpu_src1 = get_src1(insn, cpu_src1);
3598 gen_movl_reg_TN(rs2, cpu_src2);
3599 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3600 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3601 gen_movl_TN_reg(rd, cpu_dst);
3602 break;
3603 case 0x018: /* VIS I alignaddr */
3604 CHECK_FPU_FEATURE(dc, VIS1);
3605 cpu_src1 = get_src1(insn, cpu_src1);
3606 gen_movl_reg_TN(rs2, cpu_src2);
3607 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3608 gen_movl_TN_reg(rd, cpu_dst);
3609 break;
3610 case 0x019: /* VIS II bmask */
3611 case 0x01a: /* VIS I alignaddrl */
3612 // XXX
3613 goto illegal_insn;
3614 case 0x020: /* VIS I fcmple16 */
3615 CHECK_FPU_FEATURE(dc, VIS1);
3616 gen_op_load_fpr_DT0(DFPREG(rs1));
3617 gen_op_load_fpr_DT1(DFPREG(rs2));
3618 gen_helper_fcmple16();
3619 gen_op_store_DT0_fpr(DFPREG(rd));
3620 break;
3621 case 0x022: /* VIS I fcmpne16 */
3622 CHECK_FPU_FEATURE(dc, VIS1);
3623 gen_op_load_fpr_DT0(DFPREG(rs1));
3624 gen_op_load_fpr_DT1(DFPREG(rs2));
3625 gen_helper_fcmpne16();
3626 gen_op_store_DT0_fpr(DFPREG(rd));
3627 break;
3628 case 0x024: /* VIS I fcmple32 */
3629 CHECK_FPU_FEATURE(dc, VIS1);
3630 gen_op_load_fpr_DT0(DFPREG(rs1));
3631 gen_op_load_fpr_DT1(DFPREG(rs2));
3632 gen_helper_fcmple32();
3633 gen_op_store_DT0_fpr(DFPREG(rd));
3634 break;
3635 case 0x026: /* VIS I fcmpne32 */
3636 CHECK_FPU_FEATURE(dc, VIS1);
3637 gen_op_load_fpr_DT0(DFPREG(rs1));
3638 gen_op_load_fpr_DT1(DFPREG(rs2));
3639 gen_helper_fcmpne32();
3640 gen_op_store_DT0_fpr(DFPREG(rd));
3641 break;
3642 case 0x028: /* VIS I fcmpgt16 */
3643 CHECK_FPU_FEATURE(dc, VIS1);
3644 gen_op_load_fpr_DT0(DFPREG(rs1));
3645 gen_op_load_fpr_DT1(DFPREG(rs2));
3646 gen_helper_fcmpgt16();
3647 gen_op_store_DT0_fpr(DFPREG(rd));
3648 break;
3649 case 0x02a: /* VIS I fcmpeq16 */
3650 CHECK_FPU_FEATURE(dc, VIS1);
3651 gen_op_load_fpr_DT0(DFPREG(rs1));
3652 gen_op_load_fpr_DT1(DFPREG(rs2));
3653 gen_helper_fcmpeq16();
3654 gen_op_store_DT0_fpr(DFPREG(rd));
3655 break;
3656 case 0x02c: /* VIS I fcmpgt32 */
3657 CHECK_FPU_FEATURE(dc, VIS1);
3658 gen_op_load_fpr_DT0(DFPREG(rs1));
3659 gen_op_load_fpr_DT1(DFPREG(rs2));
3660 gen_helper_fcmpgt32();
3661 gen_op_store_DT0_fpr(DFPREG(rd));
3662 break;
3663 case 0x02e: /* VIS I fcmpeq32 */
3664 CHECK_FPU_FEATURE(dc, VIS1);
3665 gen_op_load_fpr_DT0(DFPREG(rs1));
3666 gen_op_load_fpr_DT1(DFPREG(rs2));
3667 gen_helper_fcmpeq32();
3668 gen_op_store_DT0_fpr(DFPREG(rd));
3669 break;
3670 case 0x031: /* VIS I fmul8x16 */
3671 CHECK_FPU_FEATURE(dc, VIS1);
3672 gen_op_load_fpr_DT0(DFPREG(rs1));
3673 gen_op_load_fpr_DT1(DFPREG(rs2));
3674 gen_helper_fmul8x16();
3675 gen_op_store_DT0_fpr(DFPREG(rd));
3676 break;
3677 case 0x033: /* VIS I fmul8x16au */
3678 CHECK_FPU_FEATURE(dc, VIS1);
3679 gen_op_load_fpr_DT0(DFPREG(rs1));
3680 gen_op_load_fpr_DT1(DFPREG(rs2));
3681 gen_helper_fmul8x16au();
3682 gen_op_store_DT0_fpr(DFPREG(rd));
3683 break;
3684 case 0x035: /* VIS I fmul8x16al */
3685 CHECK_FPU_FEATURE(dc, VIS1);
3686 gen_op_load_fpr_DT0(DFPREG(rs1));
3687 gen_op_load_fpr_DT1(DFPREG(rs2));
3688 gen_helper_fmul8x16al();
3689 gen_op_store_DT0_fpr(DFPREG(rd));
3690 break;
3691 case 0x036: /* VIS I fmul8sux16 */
3692 CHECK_FPU_FEATURE(dc, VIS1);
3693 gen_op_load_fpr_DT0(DFPREG(rs1));
3694 gen_op_load_fpr_DT1(DFPREG(rs2));
3695 gen_helper_fmul8sux16();
3696 gen_op_store_DT0_fpr(DFPREG(rd));
3697 break;
3698 case 0x037: /* VIS I fmul8ulx16 */
3699 CHECK_FPU_FEATURE(dc, VIS1);
3700 gen_op_load_fpr_DT0(DFPREG(rs1));
3701 gen_op_load_fpr_DT1(DFPREG(rs2));
3702 gen_helper_fmul8ulx16();
3703 gen_op_store_DT0_fpr(DFPREG(rd));
3704 break;
3705 case 0x038: /* VIS I fmuld8sux16 */
3706 CHECK_FPU_FEATURE(dc, VIS1);
3707 gen_op_load_fpr_DT0(DFPREG(rs1));
3708 gen_op_load_fpr_DT1(DFPREG(rs2));
3709 gen_helper_fmuld8sux16();
3710 gen_op_store_DT0_fpr(DFPREG(rd));
3711 break;
3712 case 0x039: /* VIS I fmuld8ulx16 */
3713 CHECK_FPU_FEATURE(dc, VIS1);
3714 gen_op_load_fpr_DT0(DFPREG(rs1));
3715 gen_op_load_fpr_DT1(DFPREG(rs2));
3716 gen_helper_fmuld8ulx16();
3717 gen_op_store_DT0_fpr(DFPREG(rd));
3718 break;
3719 case 0x03a: /* VIS I fpack32 */
3720 case 0x03b: /* VIS I fpack16 */
3721 case 0x03d: /* VIS I fpackfix */
3722 case 0x03e: /* VIS I pdist */
3723 // XXX
3724 goto illegal_insn;
3725 case 0x048: /* VIS I faligndata */
3726 CHECK_FPU_FEATURE(dc, VIS1);
3727 gen_op_load_fpr_DT0(DFPREG(rs1));
3728 gen_op_load_fpr_DT1(DFPREG(rs2));
3729 gen_helper_faligndata();
3730 gen_op_store_DT0_fpr(DFPREG(rd));
3731 break;
3732 case 0x04b: /* VIS I fpmerge */
3733 CHECK_FPU_FEATURE(dc, VIS1);
3734 gen_op_load_fpr_DT0(DFPREG(rs1));
3735 gen_op_load_fpr_DT1(DFPREG(rs2));
3736 gen_helper_fpmerge();
3737 gen_op_store_DT0_fpr(DFPREG(rd));
3738 break;
3739 case 0x04c: /* VIS II bshuffle */
3740 // XXX
3741 goto illegal_insn;
3742 case 0x04d: /* VIS I fexpand */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 gen_helper_fexpand();
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x050: /* VIS I fpadd16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_op_load_fpr_DT0(DFPREG(rs1));
3752 gen_op_load_fpr_DT1(DFPREG(rs2));
3753 gen_helper_fpadd16();
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x051: /* VIS I fpadd16s */
3757 CHECK_FPU_FEATURE(dc, VIS1);
3758 gen_helper_fpadd16s(cpu_fpr[rd],
3759 cpu_fpr[rs1], cpu_fpr[rs2]);
3760 break;
3761 case 0x052: /* VIS I fpadd32 */
3762 CHECK_FPU_FEATURE(dc, VIS1);
3763 gen_op_load_fpr_DT0(DFPREG(rs1));
3764 gen_op_load_fpr_DT1(DFPREG(rs2));
3765 gen_helper_fpadd32();
3766 gen_op_store_DT0_fpr(DFPREG(rd));
3767 break;
3768 case 0x053: /* VIS I fpadd32s */
3769 CHECK_FPU_FEATURE(dc, VIS1);
3770 gen_helper_fpadd32s(cpu_fpr[rd],
3771 cpu_fpr[rs1], cpu_fpr[rs2]);
3772 break;
3773 case 0x054: /* VIS I fpsub16 */
3774 CHECK_FPU_FEATURE(dc, VIS1);
3775 gen_op_load_fpr_DT0(DFPREG(rs1));
3776 gen_op_load_fpr_DT1(DFPREG(rs2));
3777 gen_helper_fpsub16();
3778 gen_op_store_DT0_fpr(DFPREG(rd));
3779 break;
3780 case 0x055: /* VIS I fpsub16s */
3781 CHECK_FPU_FEATURE(dc, VIS1);
3782 gen_helper_fpsub16s(cpu_fpr[rd],
3783 cpu_fpr[rs1], cpu_fpr[rs2]);
3784 break;
3785 case 0x056: /* VIS I fpsub32 */
3786 CHECK_FPU_FEATURE(dc, VIS1);
3787 gen_op_load_fpr_DT0(DFPREG(rs1));
3788 gen_op_load_fpr_DT1(DFPREG(rs2));
3789 gen_helper_fpsub32();
3790 gen_op_store_DT0_fpr(DFPREG(rd));
3791 break;
3792 case 0x057: /* VIS I fpsub32s */
3793 CHECK_FPU_FEATURE(dc, VIS1);
3794 gen_helper_fpsub32s(cpu_fpr[rd],
3795 cpu_fpr[rs1], cpu_fpr[rs2]);
3796 break;
3797 case 0x060: /* VIS I fzero */
3798 CHECK_FPU_FEATURE(dc, VIS1);
3799 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3800 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3801 break;
3802 case 0x061: /* VIS I fzeros */
3803 CHECK_FPU_FEATURE(dc, VIS1);
3804 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3805 break;
3806 case 0x062: /* VIS I fnor */
3807 CHECK_FPU_FEATURE(dc, VIS1);
3808 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3809 cpu_fpr[DFPREG(rs2)]);
3810 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3811 cpu_fpr[DFPREG(rs2) + 1]);
3812 break;
3813 case 0x063: /* VIS I fnors */
3814 CHECK_FPU_FEATURE(dc, VIS1);
3815 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3816 break;
3817 case 0x064: /* VIS I fandnot2 */
3818 CHECK_FPU_FEATURE(dc, VIS1);
3819 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3820 cpu_fpr[DFPREG(rs2)]);
3821 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3822 cpu_fpr[DFPREG(rs1) + 1],
3823 cpu_fpr[DFPREG(rs2) + 1]);
3824 break;
3825 case 0x065: /* VIS I fandnot2s */
3826 CHECK_FPU_FEATURE(dc, VIS1);
3827 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3828 break;
3829 case 0x066: /* VIS I fnot2 */
3830 CHECK_FPU_FEATURE(dc, VIS1);
3831 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3832 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3833 cpu_fpr[DFPREG(rs2) + 1]);
3834 break;
3835 case 0x067: /* VIS I fnot2s */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3838 break;
3839 case 0x068: /* VIS I fandnot1 */
3840 CHECK_FPU_FEATURE(dc, VIS1);
3841 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3842 cpu_fpr[DFPREG(rs1)]);
3843 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3844 cpu_fpr[DFPREG(rs2) + 1],
3845 cpu_fpr[DFPREG(rs1) + 1]);
3846 break;
3847 case 0x069: /* VIS I fandnot1s */
3848 CHECK_FPU_FEATURE(dc, VIS1);
3849 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3850 break;
3851 case 0x06a: /* VIS I fnot1 */
3852 CHECK_FPU_FEATURE(dc, VIS1);
3853 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3854 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3855 cpu_fpr[DFPREG(rs1) + 1]);
3856 break;
3857 case 0x06b: /* VIS I fnot1s */
3858 CHECK_FPU_FEATURE(dc, VIS1);
3859 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3860 break;
3861 case 0x06c: /* VIS I fxor */
3862 CHECK_FPU_FEATURE(dc, VIS1);
3863 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3864 cpu_fpr[DFPREG(rs2)]);
3865 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3866 cpu_fpr[DFPREG(rs1) + 1],
3867 cpu_fpr[DFPREG(rs2) + 1]);
3868 break;
3869 case 0x06d: /* VIS I fxors */
3870 CHECK_FPU_FEATURE(dc, VIS1);
3871 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3872 break;
3873 case 0x06e: /* VIS I fnand */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3876 cpu_fpr[DFPREG(rs2)]);
3877 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3878 cpu_fpr[DFPREG(rs2) + 1]);
3879 break;
3880 case 0x06f: /* VIS I fnands */
3881 CHECK_FPU_FEATURE(dc, VIS1);
3882 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3883 break;
3884 case 0x070: /* VIS I fand */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3887 cpu_fpr[DFPREG(rs2)]);
3888 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3889 cpu_fpr[DFPREG(rs1) + 1],
3890 cpu_fpr[DFPREG(rs2) + 1]);
3891 break;
3892 case 0x071: /* VIS I fands */
3893 CHECK_FPU_FEATURE(dc, VIS1);
3894 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3895 break;
3896 case 0x072: /* VIS I fxnor */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3899 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3900 cpu_fpr[DFPREG(rs1)]);
3901 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3902 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3903 cpu_fpr[DFPREG(rs1) + 1]);
3904 break;
3905 case 0x073: /* VIS I fxnors */
3906 CHECK_FPU_FEATURE(dc, VIS1);
3907 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3908 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3909 break;
3910 case 0x074: /* VIS I fsrc1 */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3913 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3914 cpu_fpr[DFPREG(rs1) + 1]);
3915 break;
3916 case 0x075: /* VIS I fsrc1s */
3917 CHECK_FPU_FEATURE(dc, VIS1);
3918 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3919 break;
3920 case 0x076: /* VIS I fornot2 */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3923 cpu_fpr[DFPREG(rs2)]);
3924 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3925 cpu_fpr[DFPREG(rs1) + 1],
3926 cpu_fpr[DFPREG(rs2) + 1]);
3927 break;
3928 case 0x077: /* VIS I fornot2s */
3929 CHECK_FPU_FEATURE(dc, VIS1);
3930 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3931 break;
3932 case 0x078: /* VIS I fsrc2 */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 gen_op_load_fpr_DT0(DFPREG(rs2));
3935 gen_op_store_DT0_fpr(DFPREG(rd));
3936 break;
3937 case 0x079: /* VIS I fsrc2s */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3940 break;
3941 case 0x07a: /* VIS I fornot1 */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3944 cpu_fpr[DFPREG(rs1)]);
3945 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3946 cpu_fpr[DFPREG(rs2) + 1],
3947 cpu_fpr[DFPREG(rs1) + 1]);
3948 break;
3949 case 0x07b: /* VIS I fornot1s */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3952 break;
3953 case 0x07c: /* VIS I for */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3956 cpu_fpr[DFPREG(rs2)]);
3957 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3958 cpu_fpr[DFPREG(rs1) + 1],
3959 cpu_fpr[DFPREG(rs2) + 1]);
3960 break;
3961 case 0x07d: /* VIS I fors */
3962 CHECK_FPU_FEATURE(dc, VIS1);
3963 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3964 break;
3965 case 0x07e: /* VIS I fone */
3966 CHECK_FPU_FEATURE(dc, VIS1);
3967 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3968 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3969 break;
3970 case 0x07f: /* VIS I fones */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3973 break;
3974 case 0x080: /* VIS I shutdown */
3975 case 0x081: /* VIS II siam */
3976 // XXX
3977 goto illegal_insn;
3978 default:
3979 goto illegal_insn;
3981 #else
3982 goto ncp_insn;
3983 #endif
3984 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3985 #ifdef TARGET_SPARC64
3986 goto illegal_insn;
3987 #else
3988 goto ncp_insn;
3989 #endif
3990 #ifdef TARGET_SPARC64
3991 } else if (xop == 0x39) { /* V9 return */
3992 TCGv_i32 r_const;
3994 save_state(dc, cpu_cond);
3995 cpu_src1 = get_src1(insn, cpu_src1);
3996 if (IS_IMM) { /* immediate */
3997 simm = GET_FIELDs(insn, 19, 31);
3998 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3999 } else { /* register */
4000 rs2 = GET_FIELD(insn, 27, 31);
4001 if (rs2) {
4002 gen_movl_reg_TN(rs2, cpu_src2);
4003 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4004 } else
4005 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4007 gen_helper_restore();
4008 gen_mov_pc_npc(dc, cpu_cond);
4009 r_const = tcg_const_i32(3);
4010 gen_helper_check_align(cpu_dst, r_const);
4011 tcg_temp_free_i32(r_const);
4012 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4013 dc->npc = DYNAMIC_PC;
4014 goto jmp_insn;
4015 #endif
4016 } else {
4017 cpu_src1 = get_src1(insn, cpu_src1);
4018 if (IS_IMM) { /* immediate */
4019 simm = GET_FIELDs(insn, 19, 31);
4020 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4021 } else { /* register */
4022 rs2 = GET_FIELD(insn, 27, 31);
4023 if (rs2) {
4024 gen_movl_reg_TN(rs2, cpu_src2);
4025 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4026 } else
4027 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4029 switch (xop) {
4030 case 0x38: /* jmpl */
4032 TCGv r_pc;
4033 TCGv_i32 r_const;
4035 r_pc = tcg_const_tl(dc->pc);
4036 gen_movl_TN_reg(rd, r_pc);
4037 tcg_temp_free(r_pc);
4038 gen_mov_pc_npc(dc, cpu_cond);
4039 r_const = tcg_const_i32(3);
4040 gen_helper_check_align(cpu_dst, r_const);
4041 tcg_temp_free_i32(r_const);
4042 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4043 dc->npc = DYNAMIC_PC;
4045 goto jmp_insn;
4046 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4047 case 0x39: /* rett, V9 return */
4049 TCGv_i32 r_const;
4051 if (!supervisor(dc))
4052 goto priv_insn;
4053 gen_mov_pc_npc(dc, cpu_cond);
4054 r_const = tcg_const_i32(3);
4055 gen_helper_check_align(cpu_dst, r_const);
4056 tcg_temp_free_i32(r_const);
4057 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4058 dc->npc = DYNAMIC_PC;
4059 gen_helper_rett();
4061 goto jmp_insn;
4062 #endif
4063 case 0x3b: /* flush */
4064 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4065 goto unimp_flush;
4066 gen_helper_flush(cpu_dst);
4067 break;
4068 case 0x3c: /* save */
4069 save_state(dc, cpu_cond);
4070 gen_helper_save();
4071 gen_movl_TN_reg(rd, cpu_dst);
4072 break;
4073 case 0x3d: /* restore */
4074 save_state(dc, cpu_cond);
4075 gen_helper_restore();
4076 gen_movl_TN_reg(rd, cpu_dst);
4077 break;
4078 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4079 case 0x3e: /* V9 done/retry */
4081 switch (rd) {
4082 case 0:
4083 if (!supervisor(dc))
4084 goto priv_insn;
4085 dc->npc = DYNAMIC_PC;
4086 dc->pc = DYNAMIC_PC;
4087 gen_helper_done();
4088 goto jmp_insn;
4089 case 1:
4090 if (!supervisor(dc))
4091 goto priv_insn;
4092 dc->npc = DYNAMIC_PC;
4093 dc->pc = DYNAMIC_PC;
4094 gen_helper_retry();
4095 goto jmp_insn;
4096 default:
4097 goto illegal_insn;
4100 break;
4101 #endif
4102 default:
4103 goto illegal_insn;
4106 break;
4108 break;
4109 case 3: /* load/store instructions */
4111 unsigned int xop = GET_FIELD(insn, 7, 12);
4113 cpu_src1 = get_src1(insn, cpu_src1);
4114 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4115 rs2 = GET_FIELD(insn, 27, 31);
4116 gen_movl_reg_TN(rs2, cpu_src2);
4117 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4118 } else if (IS_IMM) { /* immediate */
4119 simm = GET_FIELDs(insn, 19, 31);
4120 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4121 } else { /* register */
4122 rs2 = GET_FIELD(insn, 27, 31);
4123 if (rs2 != 0) {
4124 gen_movl_reg_TN(rs2, cpu_src2);
4125 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4126 } else
4127 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4129 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4130 (xop > 0x17 && xop <= 0x1d ) ||
4131 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4132 switch (xop) {
4133 case 0x0: /* ld, V9 lduw, load unsigned word */
4134 gen_address_mask(dc, cpu_addr);
4135 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4136 break;
4137 case 0x1: /* ldub, load unsigned byte */
4138 gen_address_mask(dc, cpu_addr);
4139 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4140 break;
4141 case 0x2: /* lduh, load unsigned halfword */
4142 gen_address_mask(dc, cpu_addr);
4143 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4144 break;
4145 case 0x3: /* ldd, load double word */
4146 if (rd & 1)
4147 goto illegal_insn;
4148 else {
4149 TCGv_i32 r_const;
4151 save_state(dc, cpu_cond);
4152 r_const = tcg_const_i32(7);
4153 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4154 tcg_temp_free_i32(r_const);
4155 gen_address_mask(dc, cpu_addr);
4156 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4157 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4158 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4159 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4160 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4161 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4162 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4164 break;
4165 case 0x9: /* ldsb, load signed byte */
4166 gen_address_mask(dc, cpu_addr);
4167 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4168 break;
4169 case 0xa: /* ldsh, load signed halfword */
4170 gen_address_mask(dc, cpu_addr);
4171 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4172 break;
4173 case 0xd: /* ldstub -- XXX: should be atomically */
4175 TCGv r_const;
4177 gen_address_mask(dc, cpu_addr);
4178 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4179 r_const = tcg_const_tl(0xff);
4180 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4181 tcg_temp_free(r_const);
4183 break;
4184 case 0x0f: /* swap, swap register with memory. Also
4185 atomically */
4186 CHECK_IU_FEATURE(dc, SWAP);
4187 gen_movl_reg_TN(rd, cpu_val);
4188 gen_address_mask(dc, cpu_addr);
4189 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4190 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4191 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4192 break;
4193 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4194 case 0x10: /* lda, V9 lduwa, load word alternate */
4195 #ifndef TARGET_SPARC64
4196 if (IS_IMM)
4197 goto illegal_insn;
4198 if (!supervisor(dc))
4199 goto priv_insn;
4200 #endif
4201 save_state(dc, cpu_cond);
4202 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4203 break;
4204 case 0x11: /* lduba, load unsigned byte alternate */
4205 #ifndef TARGET_SPARC64
4206 if (IS_IMM)
4207 goto illegal_insn;
4208 if (!supervisor(dc))
4209 goto priv_insn;
4210 #endif
4211 save_state(dc, cpu_cond);
4212 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4213 break;
4214 case 0x12: /* lduha, load unsigned halfword alternate */
4215 #ifndef TARGET_SPARC64
4216 if (IS_IMM)
4217 goto illegal_insn;
4218 if (!supervisor(dc))
4219 goto priv_insn;
4220 #endif
4221 save_state(dc, cpu_cond);
4222 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4223 break;
4224 case 0x13: /* ldda, load double word alternate */
4225 #ifndef TARGET_SPARC64
4226 if (IS_IMM)
4227 goto illegal_insn;
4228 if (!supervisor(dc))
4229 goto priv_insn;
4230 #endif
4231 if (rd & 1)
4232 goto illegal_insn;
4233 save_state(dc, cpu_cond);
4234 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4235 goto skip_move;
4236 case 0x19: /* ldsba, load signed byte alternate */
4237 #ifndef TARGET_SPARC64
4238 if (IS_IMM)
4239 goto illegal_insn;
4240 if (!supervisor(dc))
4241 goto priv_insn;
4242 #endif
4243 save_state(dc, cpu_cond);
4244 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4245 break;
4246 case 0x1a: /* ldsha, load signed halfword alternate */
4247 #ifndef TARGET_SPARC64
4248 if (IS_IMM)
4249 goto illegal_insn;
4250 if (!supervisor(dc))
4251 goto priv_insn;
4252 #endif
4253 save_state(dc, cpu_cond);
4254 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4255 break;
4256 case 0x1d: /* ldstuba -- XXX: should be atomically */
4257 #ifndef TARGET_SPARC64
4258 if (IS_IMM)
4259 goto illegal_insn;
4260 if (!supervisor(dc))
4261 goto priv_insn;
4262 #endif
4263 save_state(dc, cpu_cond);
4264 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4265 break;
4266 case 0x1f: /* swapa, swap reg with alt. memory. Also
4267 atomically */
4268 CHECK_IU_FEATURE(dc, SWAP);
4269 #ifndef TARGET_SPARC64
4270 if (IS_IMM)
4271 goto illegal_insn;
4272 if (!supervisor(dc))
4273 goto priv_insn;
4274 #endif
4275 save_state(dc, cpu_cond);
4276 gen_movl_reg_TN(rd, cpu_val);
4277 gen_swap_asi(cpu_val, cpu_addr, insn);
4278 break;
4280 #ifndef TARGET_SPARC64
4281 case 0x30: /* ldc */
4282 case 0x31: /* ldcsr */
4283 case 0x33: /* lddc */
4284 goto ncp_insn;
4285 #endif
4286 #endif
4287 #ifdef TARGET_SPARC64
4288 case 0x08: /* V9 ldsw */
4289 gen_address_mask(dc, cpu_addr);
4290 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4291 break;
4292 case 0x0b: /* V9 ldx */
4293 gen_address_mask(dc, cpu_addr);
4294 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4295 break;
4296 case 0x18: /* V9 ldswa */
4297 save_state(dc, cpu_cond);
4298 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4299 break;
4300 case 0x1b: /* V9 ldxa */
4301 save_state(dc, cpu_cond);
4302 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4303 break;
4304 case 0x2d: /* V9 prefetch, no effect */
4305 goto skip_move;
4306 case 0x30: /* V9 ldfa */
4307 save_state(dc, cpu_cond);
4308 gen_ldf_asi(cpu_addr, insn, 4, rd);
4309 goto skip_move;
4310 case 0x33: /* V9 lddfa */
4311 save_state(dc, cpu_cond);
4312 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4313 goto skip_move;
4314 case 0x3d: /* V9 prefetcha, no effect */
4315 goto skip_move;
4316 case 0x32: /* V9 ldqfa */
4317 CHECK_FPU_FEATURE(dc, FLOAT128);
4318 save_state(dc, cpu_cond);
4319 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4320 goto skip_move;
4321 #endif
4322 default:
4323 goto illegal_insn;
4325 gen_movl_TN_reg(rd, cpu_val);
4326 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4327 skip_move: ;
4328 #endif
4329 } else if (xop >= 0x20 && xop < 0x24) {
4330 if (gen_trap_ifnofpu(dc, cpu_cond))
4331 goto jmp_insn;
4332 save_state(dc, cpu_cond);
4333 switch (xop) {
4334 case 0x20: /* ldf, load fpreg */
4335 gen_address_mask(dc, cpu_addr);
4336 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4337 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4338 break;
4339 case 0x21: /* ldfsr, V9 ldxfsr */
4340 #ifdef TARGET_SPARC64
4341 gen_address_mask(dc, cpu_addr);
4342 if (rd == 1) {
4343 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4344 gen_helper_ldxfsr(cpu_tmp64);
4345 } else
4346 #else
4348 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4349 gen_helper_ldfsr(cpu_tmp32);
4351 #endif
4352 break;
4353 case 0x22: /* ldqf, load quad fpreg */
4355 TCGv_i32 r_const;
4357 CHECK_FPU_FEATURE(dc, FLOAT128);
4358 r_const = tcg_const_i32(dc->mem_idx);
4359 gen_helper_ldqf(cpu_addr, r_const);
4360 tcg_temp_free_i32(r_const);
4361 gen_op_store_QT0_fpr(QFPREG(rd));
4363 break;
4364 case 0x23: /* lddf, load double fpreg */
4366 TCGv_i32 r_const;
4368 r_const = tcg_const_i32(dc->mem_idx);
4369 gen_helper_lddf(cpu_addr, r_const);
4370 tcg_temp_free_i32(r_const);
4371 gen_op_store_DT0_fpr(DFPREG(rd));
4373 break;
4374 default:
4375 goto illegal_insn;
4377 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4378 xop == 0xe || xop == 0x1e) {
4379 gen_movl_reg_TN(rd, cpu_val);
4380 switch (xop) {
4381 case 0x4: /* st, store word */
4382 gen_address_mask(dc, cpu_addr);
4383 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4384 break;
4385 case 0x5: /* stb, store byte */
4386 gen_address_mask(dc, cpu_addr);
4387 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4388 break;
4389 case 0x6: /* sth, store halfword */
4390 gen_address_mask(dc, cpu_addr);
4391 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4392 break;
4393 case 0x7: /* std, store double word */
4394 if (rd & 1)
4395 goto illegal_insn;
4396 else {
4397 TCGv_i32 r_const;
4399 save_state(dc, cpu_cond);
4400 gen_address_mask(dc, cpu_addr);
4401 r_const = tcg_const_i32(7);
4402 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4403 tcg_temp_free_i32(r_const);
4404 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4405 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4406 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4408 break;
4409 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4410 case 0x14: /* sta, V9 stwa, store word alternate */
4411 #ifndef TARGET_SPARC64
4412 if (IS_IMM)
4413 goto illegal_insn;
4414 if (!supervisor(dc))
4415 goto priv_insn;
4416 #endif
4417 save_state(dc, cpu_cond);
4418 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4419 break;
4420 case 0x15: /* stba, store byte alternate */
4421 #ifndef TARGET_SPARC64
4422 if (IS_IMM)
4423 goto illegal_insn;
4424 if (!supervisor(dc))
4425 goto priv_insn;
4426 #endif
4427 save_state(dc, cpu_cond);
4428 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4429 break;
4430 case 0x16: /* stha, store halfword alternate */
4431 #ifndef TARGET_SPARC64
4432 if (IS_IMM)
4433 goto illegal_insn;
4434 if (!supervisor(dc))
4435 goto priv_insn;
4436 #endif
4437 save_state(dc, cpu_cond);
4438 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4439 break;
4440 case 0x17: /* stda, store double word alternate */
4441 #ifndef TARGET_SPARC64
4442 if (IS_IMM)
4443 goto illegal_insn;
4444 if (!supervisor(dc))
4445 goto priv_insn;
4446 #endif
4447 if (rd & 1)
4448 goto illegal_insn;
4449 else {
4450 save_state(dc, cpu_cond);
4451 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4453 break;
4454 #endif
4455 #ifdef TARGET_SPARC64
4456 case 0x0e: /* V9 stx */
4457 gen_address_mask(dc, cpu_addr);
4458 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4459 break;
4460 case 0x1e: /* V9 stxa */
4461 save_state(dc, cpu_cond);
4462 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4463 break;
4464 #endif
4465 default:
4466 goto illegal_insn;
4468 } else if (xop > 0x23 && xop < 0x28) {
4469 if (gen_trap_ifnofpu(dc, cpu_cond))
4470 goto jmp_insn;
4471 save_state(dc, cpu_cond);
4472 switch (xop) {
4473 case 0x24: /* stf, store fpreg */
4474 gen_address_mask(dc, cpu_addr);
4475 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4476 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4477 break;
4478 case 0x25: /* stfsr, V9 stxfsr */
4479 #ifdef TARGET_SPARC64
4480 gen_address_mask(dc, cpu_addr);
4481 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4482 if (rd == 1)
4483 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4484 else
4485 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4486 #else
4487 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4488 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4489 #endif
4490 break;
4491 case 0x26:
4492 #ifdef TARGET_SPARC64
4493 /* V9 stqf, store quad fpreg */
4495 TCGv_i32 r_const;
4497 CHECK_FPU_FEATURE(dc, FLOAT128);
4498 gen_op_load_fpr_QT0(QFPREG(rd));
4499 r_const = tcg_const_i32(dc->mem_idx);
4500 gen_helper_stqf(cpu_addr, r_const);
4501 tcg_temp_free_i32(r_const);
4503 break;
4504 #else /* !TARGET_SPARC64 */
4505 /* stdfq, store floating point queue */
4506 #if defined(CONFIG_USER_ONLY)
4507 goto illegal_insn;
4508 #else
4509 if (!supervisor(dc))
4510 goto priv_insn;
4511 if (gen_trap_ifnofpu(dc, cpu_cond))
4512 goto jmp_insn;
4513 goto nfq_insn;
4514 #endif
4515 #endif
4516 case 0x27: /* stdf, store double fpreg */
4518 TCGv_i32 r_const;
4520 gen_op_load_fpr_DT0(DFPREG(rd));
4521 r_const = tcg_const_i32(dc->mem_idx);
4522 gen_helper_stdf(cpu_addr, r_const);
4523 tcg_temp_free_i32(r_const);
4525 break;
4526 default:
4527 goto illegal_insn;
4529 } else if (xop > 0x33 && xop < 0x3f) {
4530 save_state(dc, cpu_cond);
4531 switch (xop) {
4532 #ifdef TARGET_SPARC64
4533 case 0x34: /* V9 stfa */
4534 gen_stf_asi(cpu_addr, insn, 4, rd);
4535 break;
4536 case 0x36: /* V9 stqfa */
4538 TCGv_i32 r_const;
4540 CHECK_FPU_FEATURE(dc, FLOAT128);
4541 r_const = tcg_const_i32(7);
4542 gen_helper_check_align(cpu_addr, r_const);
4543 tcg_temp_free_i32(r_const);
4544 gen_op_load_fpr_QT0(QFPREG(rd));
4545 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4547 break;
4548 case 0x37: /* V9 stdfa */
4549 gen_op_load_fpr_DT0(DFPREG(rd));
4550 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4551 break;
4552 case 0x3c: /* V9 casa */
4553 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4554 gen_movl_TN_reg(rd, cpu_val);
4555 break;
4556 case 0x3e: /* V9 casxa */
4557 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4558 gen_movl_TN_reg(rd, cpu_val);
4559 break;
4560 #else
4561 case 0x34: /* stc */
4562 case 0x35: /* stcsr */
4563 case 0x36: /* stdcq */
4564 case 0x37: /* stdc */
4565 goto ncp_insn;
4566 #endif
4567 default:
4568 goto illegal_insn;
4570 } else
4571 goto illegal_insn;
4573 break;
4575 /* default case for non jump instructions */
4576 if (dc->npc == DYNAMIC_PC) {
4577 dc->pc = DYNAMIC_PC;
4578 gen_op_next_insn();
4579 } else if (dc->npc == JUMP_PC) {
4580 /* we can do a static jump */
4581 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4582 dc->is_br = 1;
4583 } else {
4584 dc->pc = dc->npc;
4585 dc->npc = dc->npc + 4;
4587 jmp_insn:
4588 return;
4589 illegal_insn:
4591 TCGv_i32 r_const;
4593 save_state(dc, cpu_cond);
4594 r_const = tcg_const_i32(TT_ILL_INSN);
4595 gen_helper_raise_exception(r_const);
4596 tcg_temp_free_i32(r_const);
4597 dc->is_br = 1;
4599 return;
4600 unimp_flush:
4602 TCGv_i32 r_const;
4604 save_state(dc, cpu_cond);
4605 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4606 gen_helper_raise_exception(r_const);
4607 tcg_temp_free_i32(r_const);
4608 dc->is_br = 1;
4610 return;
4611 #if !defined(CONFIG_USER_ONLY)
4612 priv_insn:
4614 TCGv_i32 r_const;
4616 save_state(dc, cpu_cond);
4617 r_const = tcg_const_i32(TT_PRIV_INSN);
4618 gen_helper_raise_exception(r_const);
4619 tcg_temp_free_i32(r_const);
4620 dc->is_br = 1;
4622 return;
4623 #endif
4624 nfpu_insn:
4625 save_state(dc, cpu_cond);
4626 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4627 dc->is_br = 1;
4628 return;
4629 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4630 nfq_insn:
4631 save_state(dc, cpu_cond);
4632 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4633 dc->is_br = 1;
4634 return;
4635 #endif
4636 #ifndef TARGET_SPARC64
4637 ncp_insn:
4639 TCGv r_const;
4641 save_state(dc, cpu_cond);
4642 r_const = tcg_const_i32(TT_NCP_INSN);
4643 gen_helper_raise_exception(r_const);
4644 tcg_temp_free(r_const);
4645 dc->is_br = 1;
4647 return;
4648 #endif
/*
 * Translate one guest translation block into TCG ops.
 *
 * tb:  the TranslationBlock being filled in (pc/cs_base give start pc/npc).
 * spc: non-zero when invoked from the "search pc" path, in which case
 *      per-op pc/npc/icount bookkeeping is recorded in the gen_opc_*
 *      arrays so a host pc can later be mapped back to a guest pc.
 * env: the CPU state used for mmu index, FPU-enable and feature checks.
 */
4651 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4652 int spc, CPUSPARCState *env)
4654 target_ulong pc_start, last_pc;
4655 uint16_t *gen_opc_end;
4656 DisasContext dc1, *dc = &dc1;
4657 CPUBreakpoint *bp;
4658 int j, lj = -1;
4659 int num_insns;
4660 int max_insns;
/* Set up the disassembly context from the TB and CPU state. */
4662 memset(dc, 0, sizeof(DisasContext));
4663 dc->tb = tb;
4664 pc_start = tb->pc;
4665 dc->pc = pc_start;
4666 last_pc = dc->pc;
4667 dc->npc = (target_ulong) tb->cs_base;
4668 dc->cc_op = CC_OP_DYNAMIC;
4669 dc->mem_idx = cpu_mmu_index(env);
4670 dc->def = env->def;
4671 if ((dc->def->features & CPU_FEATURE_FLOAT))
4672 dc->fpu_enabled = cpu_fpu_enabled(env);
4673 else
4674 dc->fpu_enabled = 0;
4675 #ifdef TARGET_SPARC64
4676 dc->address_mask_32bit = env->pstate & PS_AM;
4677 #endif
4678 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Scratch temporaries shared by the per-insn emitters; freed below. */
4680 cpu_tmp0 = tcg_temp_new();
4681 cpu_tmp32 = tcg_temp_new_i32();
4682 cpu_tmp64 = tcg_temp_new_i64();
/* Locals survive branches inside a single insn's generated code. */
4684 cpu_dst = tcg_temp_local_new();
4686 // loads and stores
4687 cpu_val = tcg_temp_local_new();
4688 cpu_addr = tcg_temp_local_new();
4690 num_insns = 0;
4691 max_insns = tb->cflags & CF_COUNT_MASK;
4692 if (max_insns == 0)
4693 max_insns = CF_COUNT_MASK;
4694 gen_icount_start();
4695 do {
/* Stop at a debugger breakpoint: emit the debug helper and exit. */
4696 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4697 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4698 if (bp->pc == dc->pc) {
4699 if (dc->pc != pc_start)
4700 save_state(dc, cpu_cond);
4701 gen_helper_debug();
4702 tcg_gen_exit_tb(0);
4703 dc->is_br = 1;
4704 goto exit_gen_loop;
/* Search-pc mode: record pc/npc/icount for each generated op index. */
4708 if (spc) {
4709 qemu_log("Search PC...\n");
4710 j = gen_opc_ptr - gen_opc_buf;
4711 if (lj < j) {
4712 lj++;
4713 while (lj < j)
4714 gen_opc_instr_start[lj++] = 0;
4715 gen_opc_pc[lj] = dc->pc;
4716 gen_opc_npc[lj] = dc->npc;
4717 gen_opc_instr_start[lj] = 1;
4718 gen_opc_icount[lj] = num_insns;
4721 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4722 gen_io_start();
4723 last_pc = dc->pc;
4724 disas_sparc_insn(dc);
4725 num_insns++;
4727 if (dc->is_br)
4728 break;
4729 /* if the next PC is different, we abort now */
4730 if (dc->pc != (last_pc + 4))
4731 break;
4732 /* if we reach a page boundary, we stop generation so that the
4733 PC of a TT_TFAULT exception is always in the right page */
4734 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4735 break;
4736 /* if single step mode, we generate only one instruction and
4737 generate an exception */
4738 if (env->singlestep_enabled || singlestep) {
4739 tcg_gen_movi_tl(cpu_pc, dc->pc);
4740 tcg_gen_exit_tb(0);
4741 break;
4742 } while ((gen_opc_ptr < gen_opc_end) &&
4743 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4744 num_insns < max_insns);
4747 exit_gen_loop:
/* Release the shared temporaries in reverse order of allocation. */
4748 tcg_temp_free(cpu_addr);
4749 tcg_temp_free(cpu_val);
4750 tcg_temp_free(cpu_dst);
4751 tcg_temp_free_i64(cpu_tmp64);
4752 tcg_temp_free_i32(cpu_tmp32);
4753 tcg_temp_free(cpu_tmp0);
4754 if (tb->cflags & CF_LAST_IO)
4755 gen_io_end();
4756 if (!dc->is_br) {
4757 if (dc->pc != DYNAMIC_PC &&
4758 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4759 /* static PC and NPC: we can use direct chaining */
4760 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4761 } else {
4762 if (dc->pc != DYNAMIC_PC)
4763 tcg_gen_movi_tl(cpu_pc, dc->pc);
4764 save_npc(dc, cpu_cond);
4765 tcg_gen_exit_tb(0);
4768 gen_icount_end(tb, num_insns);
4769 *gen_opc_ptr = INDEX_op_end;
4770 if (spc) {
/* Pad the remaining op slots and publish the conditional-branch
 * targets for gen_pc_load(). */
4771 j = gen_opc_ptr - gen_opc_buf;
4772 lj++;
4773 while (lj <= j)
4774 gen_opc_instr_start[lj++] = 0;
4775 #if 0
4776 log_page_dump();
4777 #endif
4778 gen_opc_jump_pc[0] = dc->jump_pc[0];
4779 gen_opc_jump_pc[1] = dc->jump_pc[1];
4780 } else {
4781 tb->size = last_pc + 4 - pc_start;
4782 tb->icount = num_insns;
4784 #ifdef DEBUG_DISAS
4785 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4786 qemu_log("--------------\n");
4787 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4788 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4789 qemu_log("\n");
4791 #endif
/* Normal translation entry point: no search-pc bookkeeping (spc = 0). */
4794 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4796 gen_intermediate_code_internal(tb, 0, env);
/* Search-pc translation entry point: records per-op pc/npc so a host pc
 * can be mapped back to a guest pc (spc = 1). */
4799 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4801 gen_intermediate_code_internal(tb, 1, env);
/*
 * One-time registration of the TCG global variables that mirror CPU
 * state (pc, npc, condition codes, windowed registers, FP registers,
 * and the sparc64-only extras).  Guarded by the static 'inited' flag,
 * so repeated calls after the first are no-ops.
 */
4804 void gen_intermediate_code_init(CPUSPARCState *env)
4806 unsigned int i;
4807 static int inited;
4808 static const char * const gregnames[8] = {
4809 NULL, // g0 not used
4810 "g1",
4811 "g2",
4812 "g3",
4813 "g4",
4814 "g5",
4815 "g6",
4816 "g7",
4818 static const char * const fregnames[64] = {
4819 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4820 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4821 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4822 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4823 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4824 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4825 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4826 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4829 /* init various static tables */
4830 if (!inited) {
4831 inited = 1;
/* env pointer lives in a fixed host register; everything else is a
 * TCG global backed by a CPUState field. */
4833 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4834 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4835 offsetof(CPUState, regwptr),
4836 "regwptr");
4837 #ifdef TARGET_SPARC64
/* sparc64-only globals: extended condition codes, ASI, FP register
 * state, tick comparators and hypervisor registers. */
4838 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4839 "xcc");
4840 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4841 "asi");
4842 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4843 "fprs");
4844 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4845 "gsr");
4846 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4847 offsetof(CPUState, tick_cmpr),
4848 "tick_cmpr");
4849 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4850 offsetof(CPUState, stick_cmpr),
4851 "stick_cmpr");
4852 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4853 offsetof(CPUState, hstick_cmpr),
4854 "hstick_cmpr");
4855 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4856 "hintp");
4857 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4858 "htba");
4859 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4860 "hver");
4861 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4862 offsetof(CPUState, ssr), "ssr");
4863 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4864 offsetof(CPUState, version), "ver");
4865 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4866 offsetof(CPUState, softint),
4867 "softint");
4868 #else
/* sparc32-only: window invalid mask. */
4869 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4870 "wim");
4871 #endif
4872 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4873 "cond");
4874 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4875 "cc_src");
4876 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4877 offsetof(CPUState, cc_src2),
4878 "cc_src2");
4879 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4880 "cc_dst");
4881 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
4882 "cc_op");
4883 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4884 "psr");
4885 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4886 "fsr");
4887 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4888 "pc");
4889 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
4890 "npc");
4891 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
4892 #ifndef CONFIG_USER_ONLY
4893 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
4894 "tbr");
4895 #endif
/* Skip index 0: gregnames[0] is NULL (g0 not registered as a global). */
4896 for (i = 1; i < 8; i++)
4897 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
4898 offsetof(CPUState, gregs[i]),
4899 gregnames[i]);
4900 for (i = 0; i < TARGET_FPREGS; i++)
4901 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
4902 offsetof(CPUState, fpr[i]),
4903 fregnames[i]);
4905 /* register helpers */
4907 #define GEN_HELPER 2
4908 #include "helper.h"
/*
 * Restore guest pc/npc from the gen_opc_* arrays after an exception
 * interrupted execution mid-TB (the "search pc" path).  pc_pos indexes
 * the per-op records filled in by gen_intermediate_code_internal(spc=1).
 * Note: this function continues beyond this excerpt.
 */
4912 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4913 unsigned long searched_pc, int pc_pos, void *puc)
4915 target_ulong npc;
4916 env->pc = gen_opc_pc[pc_pos];
4917 npc = gen_opc_npc[pc_pos];
/* Sentinel npc values match DYNAMIC_PC (1) and JUMP_PC (2). */
4918 if (npc == 1) {
4919 /* dynamic NPC: already stored */
4920 } else if (npc == 2) {
4921 target_ulong t2 = (target_ulong)(unsigned long)puc;
4922 /* jump PC: use T2 and the jump targets of the translation */
4923 if (t2)
4924 env->npc = gen_opc_jump_pc[0];
4925 else
4926 env->npc = gen_opc_jump_pc[1];
4927 } else {
4928 env->npc = npc;
4931 /* flush pending conditional evaluations before exposing cpu state */
4932 if (CC_OP != CC_OP_FLAGS) {
4933 helper_compute_psr();