Merge remote branch 'mst/for_anthony' into staging
[qemu/aliguori-queue.git] / target-sparc / translate.c
blobea7c71b85a4a96559ee35955b4655c23a978ae70
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "exec-all.h"
29 #include "disas.h"
30 #include "helper.h"
31 #include "tcg-op.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env, cpu_regwptr;
44 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45 static TCGv_i32 cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70 static target_ulong gen_opc_jump_pc[2];
72 #include "gen-icount.h"
74 typedef struct DisasContext {
75 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
76 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78 int is_br;
79 int mem_idx;
80 int fpu_enabled;
81 int address_mask_32bit;
82 int singlestep;
83 uint32_t cc_op; /* current CC operation */
84 struct TranslationBlock *tb;
85 sparc_def_t *def;
86 } DisasContext;
/* Extract a bit field using the non-native bit order (MSB is bit 0). */
#define GET_FIELD(X, FROM, TO) \
    (((X) >> (31 - (TO))) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Extract a bit field using the order in the manuals, i.e. bit 0 is 2^0. */
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map an architectural double/quad FP register number onto the flat
   cpu_fpr[] index.  NOTE: `r` is now fully parenthesized so expression
   arguments such as QFPREG(rd | 1) expand correctly (the previous
   definition applied & to only part of the expression). */
#ifdef TARGET_SPARC64
#define DFPREG(r) ((((r) & 1) << 5) | ((r) & 0x1e))
#define QFPREG(r) ((((r) & 1) << 5) | ((r) & 0x1c))
#else
#define DFPREG(r) ((r) & 0x1e)
#define QFPREG(r) ((r) & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/* Sign-extend the low `len` bits of x.
 *
 * The previous implementation did `(x << len) >> len`, which left-shifts
 * a (possibly negative) signed int — undefined behavior in C99 — and
 * relies on implementation-defined arithmetic right shift.  The mask/xor
 * form below is fully defined and produces the same results.
 */
static int sign_extend(int x, int len)
{
    uint32_t mask = (len >= 32) ? 0xffffffffu : ((1u << len) - 1);
    uint32_t val = (uint32_t)x & mask;      /* keep only the low len bits */
    uint32_t sign = 1u << (len - 1);        /* sign bit of the field */

    return (int)((val ^ sign) - sign);
}

#define IS_IMM (insn & (1<<13))
118 /* floating point registers moves */
119 static void gen_op_load_fpr_DT0(unsigned int src)
121 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
122 offsetof(CPU_DoubleU, l.upper));
123 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
124 offsetof(CPU_DoubleU, l.lower));
127 static void gen_op_load_fpr_DT1(unsigned int src)
129 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
130 offsetof(CPU_DoubleU, l.upper));
131 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
132 offsetof(CPU_DoubleU, l.lower));
135 static void gen_op_store_DT0_fpr(unsigned int dst)
137 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
138 offsetof(CPU_DoubleU, l.upper));
139 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
140 offsetof(CPU_DoubleU, l.lower));
143 static void gen_op_load_fpr_QT0(unsigned int src)
145 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.upmost));
147 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.upper));
149 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
150 offsetof(CPU_QuadU, l.lower));
151 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
152 offsetof(CPU_QuadU, l.lowest));
155 static void gen_op_load_fpr_QT1(unsigned int src)
157 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.upmost));
159 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.upper));
161 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
162 offsetof(CPU_QuadU, l.lower));
163 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
164 offsetof(CPU_QuadU, l.lowest));
167 static void gen_op_store_QT0_fpr(unsigned int dst)
169 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.upmost));
171 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.upper));
173 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
174 offsetof(CPU_QuadU, l.lower));
175 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
176 offsetof(CPU_QuadU, l.lowest));
/* moves */
#ifdef CONFIG_USER_ONLY
/* user mode: no privileged state is reachable */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
/* privilege level is encoded in the MMU index */
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

/* AM_CHECK: should 64-bit addresses be truncated to 32 bits? */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
201 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
203 #ifdef TARGET_SPARC64
204 if (AM_CHECK(dc))
205 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
206 #endif
209 static inline void gen_movl_reg_TN(int reg, TCGv tn)
211 if (reg == 0)
212 tcg_gen_movi_tl(tn, 0);
213 else if (reg < 8)
214 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
215 else {
216 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
220 static inline void gen_movl_TN_reg(int reg, TCGv tn)
222 if (reg == 0)
223 return;
224 else if (reg < 8)
225 tcg_gen_mov_tl(cpu_gregs[reg], tn);
226 else {
227 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
231 static inline void gen_goto_tb(DisasContext *s, int tb_num,
232 target_ulong pc, target_ulong npc)
234 TranslationBlock *tb;
236 tb = s->tb;
237 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
238 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
239 !s->singlestep) {
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num);
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb((long)tb + tb_num);
245 } else {
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc, pc);
248 tcg_gen_movi_tl(cpu_npc, npc);
249 tcg_gen_exit_tb(0);
253 // XXX suboptimal
254 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
256 tcg_gen_extu_i32_tl(reg, src);
257 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
258 tcg_gen_andi_tl(reg, reg, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
263 tcg_gen_extu_i32_tl(reg, src);
264 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
265 tcg_gen_andi_tl(reg, reg, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
270 tcg_gen_extu_i32_tl(reg, src);
271 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
272 tcg_gen_andi_tl(reg, reg, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
277 tcg_gen_extu_i32_tl(reg, src);
278 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
279 tcg_gen_andi_tl(reg, reg, 0x1);
282 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
284 TCGv r_temp;
285 TCGv_i32 r_const;
286 int l1;
288 l1 = gen_new_label();
290 r_temp = tcg_temp_new();
291 tcg_gen_xor_tl(r_temp, src1, src2);
292 tcg_gen_not_tl(r_temp, r_temp);
293 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
294 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
295 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
296 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
297 r_const = tcg_const_i32(TT_TOVF);
298 gen_helper_raise_exception(r_const);
299 tcg_temp_free_i32(r_const);
300 gen_set_label(l1);
301 tcg_temp_free(r_temp);
304 static inline void gen_tag_tv(TCGv src1, TCGv src2)
306 int l1;
307 TCGv_i32 r_const;
309 l1 = gen_new_label();
310 tcg_gen_or_tl(cpu_tmp0, src1, src2);
311 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
312 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
313 r_const = tcg_const_i32(TT_TOVF);
314 gen_helper_raise_exception(r_const);
315 tcg_temp_free_i32(r_const);
316 gen_set_label(l1);
319 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
321 tcg_gen_mov_tl(cpu_cc_src, src1);
322 tcg_gen_movi_tl(cpu_cc_src2, src2);
323 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
324 tcg_gen_mov_tl(dst, cpu_cc_dst);
327 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
329 tcg_gen_mov_tl(cpu_cc_src, src1);
330 tcg_gen_mov_tl(cpu_cc_src2, src2);
331 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
332 tcg_gen_mov_tl(dst, cpu_cc_dst);
335 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
337 gen_helper_compute_C_icc(cpu_tmp0);
338 tcg_gen_mov_tl(cpu_cc_src, src1);
339 tcg_gen_movi_tl(cpu_cc_src2, src2);
340 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
341 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
342 tcg_gen_mov_tl(dst, cpu_cc_dst);
345 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
347 gen_helper_compute_C_icc(cpu_tmp0);
348 tcg_gen_mov_tl(cpu_cc_src, src1);
349 tcg_gen_mov_tl(cpu_cc_src2, src2);
350 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
351 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
352 tcg_gen_mov_tl(dst, cpu_cc_dst);
355 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
357 tcg_gen_mov_tl(cpu_cc_src, src1);
358 tcg_gen_mov_tl(cpu_cc_src2, src2);
359 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
360 tcg_gen_mov_tl(dst, cpu_cc_dst);
363 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
365 tcg_gen_mov_tl(cpu_cc_src, src1);
366 tcg_gen_mov_tl(cpu_cc_src2, src2);
367 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
368 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
369 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
370 tcg_gen_mov_tl(dst, cpu_cc_dst);
373 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
375 TCGv r_temp;
376 TCGv_i32 r_const;
377 int l1;
379 l1 = gen_new_label();
381 r_temp = tcg_temp_new();
382 tcg_gen_xor_tl(r_temp, src1, src2);
383 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
384 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
385 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
386 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
387 r_const = tcg_const_i32(TT_TOVF);
388 gen_helper_raise_exception(r_const);
389 tcg_temp_free_i32(r_const);
390 gen_set_label(l1);
391 tcg_temp_free(r_temp);
394 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
396 tcg_gen_mov_tl(cpu_cc_src, src1);
397 tcg_gen_movi_tl(cpu_cc_src2, src2);
398 if (src2 == 0) {
399 tcg_gen_mov_tl(cpu_cc_dst, src1);
400 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
401 dc->cc_op = CC_OP_LOGIC;
402 } else {
403 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
404 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
405 dc->cc_op = CC_OP_SUB;
407 tcg_gen_mov_tl(dst, cpu_cc_dst);
410 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
412 tcg_gen_mov_tl(cpu_cc_src, src1);
413 tcg_gen_mov_tl(cpu_cc_src2, src2);
414 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
415 tcg_gen_mov_tl(dst, cpu_cc_dst);
418 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
420 gen_helper_compute_C_icc(cpu_tmp0);
421 tcg_gen_mov_tl(cpu_cc_src, src1);
422 tcg_gen_movi_tl(cpu_cc_src2, src2);
423 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
424 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
425 tcg_gen_mov_tl(dst, cpu_cc_dst);
428 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
430 gen_helper_compute_C_icc(cpu_tmp0);
431 tcg_gen_mov_tl(cpu_cc_src, src1);
432 tcg_gen_mov_tl(cpu_cc_src2, src2);
433 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
434 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
435 tcg_gen_mov_tl(dst, cpu_cc_dst);
438 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
440 tcg_gen_mov_tl(cpu_cc_src, src1);
441 tcg_gen_mov_tl(cpu_cc_src2, src2);
442 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
443 tcg_gen_mov_tl(dst, cpu_cc_dst);
446 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
448 tcg_gen_mov_tl(cpu_cc_src, src1);
449 tcg_gen_mov_tl(cpu_cc_src2, src2);
450 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
451 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
452 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
453 tcg_gen_mov_tl(dst, cpu_cc_dst);
456 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
458 TCGv r_temp;
459 int l1;
461 l1 = gen_new_label();
462 r_temp = tcg_temp_new();
464 /* old op:
465 if (!(env->y & 1))
466 T1 = 0;
468 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
469 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
470 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
471 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
472 tcg_gen_movi_tl(cpu_cc_src2, 0);
473 gen_set_label(l1);
475 // b2 = T0 & 1;
476 // env->y = (b2 << 31) | (env->y >> 1);
477 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
478 tcg_gen_shli_tl(r_temp, r_temp, 31);
479 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
480 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
481 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
482 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
484 // b1 = N ^ V;
485 gen_mov_reg_N(cpu_tmp0, cpu_psr);
486 gen_mov_reg_V(r_temp, cpu_psr);
487 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
488 tcg_temp_free(r_temp);
490 // T0 = (b1 << 31) | (T0 >> 1);
491 // src1 = T0;
492 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
493 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
494 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
496 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
498 tcg_gen_mov_tl(dst, cpu_cc_dst);
501 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
503 TCGv_i64 r_temp, r_temp2;
505 r_temp = tcg_temp_new_i64();
506 r_temp2 = tcg_temp_new_i64();
508 tcg_gen_extu_tl_i64(r_temp, src2);
509 tcg_gen_extu_tl_i64(r_temp2, src1);
510 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
512 tcg_gen_shri_i64(r_temp, r_temp2, 32);
513 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
514 tcg_temp_free_i64(r_temp);
515 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
516 #ifdef TARGET_SPARC64
517 tcg_gen_mov_i64(dst, r_temp2);
518 #else
519 tcg_gen_trunc_i64_tl(dst, r_temp2);
520 #endif
521 tcg_temp_free_i64(r_temp2);
524 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
526 TCGv_i64 r_temp, r_temp2;
528 r_temp = tcg_temp_new_i64();
529 r_temp2 = tcg_temp_new_i64();
531 tcg_gen_ext_tl_i64(r_temp, src2);
532 tcg_gen_ext_tl_i64(r_temp2, src1);
533 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
535 tcg_gen_shri_i64(r_temp, r_temp2, 32);
536 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
537 tcg_temp_free_i64(r_temp);
538 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
539 #ifdef TARGET_SPARC64
540 tcg_gen_mov_i64(dst, r_temp2);
541 #else
542 tcg_gen_trunc_i64_tl(dst, r_temp2);
543 #endif
544 tcg_temp_free_i64(r_temp2);
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if the divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 trapno;
    int nonzero = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, nonzero);
    trapno = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(trapno);
    tcg_temp_free_i32(trapno);
    gen_set_label(nonzero);
}

/* 64-bit signed divide.  INT64_MIN / -1 cannot be expressed by the host
   div, so it is special-cased to yield INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int regular = gen_new_label();
    int done = gen_new_label();

    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, regular);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, regular);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(done);
    gen_set_label(regular);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(done);
}
#endif
580 // 1
581 static inline void gen_op_eval_ba(TCGv dst)
583 tcg_gen_movi_tl(dst, 1);
586 // Z
587 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
589 gen_mov_reg_Z(dst, src);
592 // Z | (N ^ V)
593 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
595 gen_mov_reg_N(cpu_tmp0, src);
596 gen_mov_reg_V(dst, src);
597 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
598 gen_mov_reg_Z(cpu_tmp0, src);
599 tcg_gen_or_tl(dst, dst, cpu_tmp0);
602 // N ^ V
603 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
605 gen_mov_reg_V(cpu_tmp0, src);
606 gen_mov_reg_N(dst, src);
607 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
610 // C | Z
611 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
613 gen_mov_reg_Z(cpu_tmp0, src);
614 gen_mov_reg_C(dst, src);
615 tcg_gen_or_tl(dst, dst, cpu_tmp0);
618 // C
619 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
621 gen_mov_reg_C(dst, src);
624 // V
625 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
627 gen_mov_reg_V(dst, src);
630 // 0
631 static inline void gen_op_eval_bn(TCGv dst)
633 tcg_gen_movi_tl(dst, 0);
636 // N
637 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
639 gen_mov_reg_N(dst, src);
642 // !Z
643 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
645 gen_mov_reg_Z(dst, src);
646 tcg_gen_xori_tl(dst, dst, 0x1);
649 // !(Z | (N ^ V))
650 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
652 gen_mov_reg_N(cpu_tmp0, src);
653 gen_mov_reg_V(dst, src);
654 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
655 gen_mov_reg_Z(cpu_tmp0, src);
656 tcg_gen_or_tl(dst, dst, cpu_tmp0);
657 tcg_gen_xori_tl(dst, dst, 0x1);
660 // !(N ^ V)
661 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
663 gen_mov_reg_V(cpu_tmp0, src);
664 gen_mov_reg_N(dst, src);
665 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
666 tcg_gen_xori_tl(dst, dst, 0x1);
669 // !(C | Z)
670 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
672 gen_mov_reg_Z(cpu_tmp0, src);
673 gen_mov_reg_C(dst, src);
674 tcg_gen_or_tl(dst, dst, cpu_tmp0);
675 tcg_gen_xori_tl(dst, dst, 0x1);
678 // !C
679 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
681 gen_mov_reg_C(dst, src);
682 tcg_gen_xori_tl(dst, dst, 0x1);
685 // !N
686 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
688 gen_mov_reg_N(dst, src);
689 tcg_gen_xori_tl(dst, dst, 0x1);
692 // !V
693 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
695 gen_mov_reg_V(dst, src);
696 tcg_gen_xori_tl(dst, dst, 0x1);
700 FPSR bit field FCC1 | FCC0:
704 3 unordered
706 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
707 unsigned int fcc_offset)
709 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
710 tcg_gen_andi_tl(reg, reg, 0x1);
713 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
714 unsigned int fcc_offset)
716 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
717 tcg_gen_andi_tl(reg, reg, 0x1);
720 // !0: FCC0 | FCC1
721 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
722 unsigned int fcc_offset)
724 gen_mov_reg_FCC0(dst, src, fcc_offset);
725 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
726 tcg_gen_or_tl(dst, dst, cpu_tmp0);
729 // 1 or 2: FCC0 ^ FCC1
730 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
731 unsigned int fcc_offset)
733 gen_mov_reg_FCC0(dst, src, fcc_offset);
734 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
735 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
738 // 1 or 3: FCC0
739 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
740 unsigned int fcc_offset)
742 gen_mov_reg_FCC0(dst, src, fcc_offset);
745 // 1: FCC0 & !FCC1
746 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
747 unsigned int fcc_offset)
749 gen_mov_reg_FCC0(dst, src, fcc_offset);
750 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
751 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
752 tcg_gen_and_tl(dst, dst, cpu_tmp0);
755 // 2 or 3: FCC1
756 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
757 unsigned int fcc_offset)
759 gen_mov_reg_FCC1(dst, src, fcc_offset);
762 // 2: !FCC0 & FCC1
763 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
764 unsigned int fcc_offset)
766 gen_mov_reg_FCC0(dst, src, fcc_offset);
767 tcg_gen_xori_tl(dst, dst, 0x1);
768 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
769 tcg_gen_and_tl(dst, dst, cpu_tmp0);
772 // 3: FCC0 & FCC1
773 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
774 unsigned int fcc_offset)
776 gen_mov_reg_FCC0(dst, src, fcc_offset);
777 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
778 tcg_gen_and_tl(dst, dst, cpu_tmp0);
781 // 0: !(FCC0 | FCC1)
782 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
783 unsigned int fcc_offset)
785 gen_mov_reg_FCC0(dst, src, fcc_offset);
786 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
787 tcg_gen_or_tl(dst, dst, cpu_tmp0);
788 tcg_gen_xori_tl(dst, dst, 0x1);
791 // 0 or 3: !(FCC0 ^ FCC1)
792 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
793 unsigned int fcc_offset)
795 gen_mov_reg_FCC0(dst, src, fcc_offset);
796 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
797 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
798 tcg_gen_xori_tl(dst, dst, 0x1);
801 // 0 or 2: !FCC0
802 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
803 unsigned int fcc_offset)
805 gen_mov_reg_FCC0(dst, src, fcc_offset);
806 tcg_gen_xori_tl(dst, dst, 0x1);
809 // !1: !(FCC0 & !FCC1)
810 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
811 unsigned int fcc_offset)
813 gen_mov_reg_FCC0(dst, src, fcc_offset);
814 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
815 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
816 tcg_gen_and_tl(dst, dst, cpu_tmp0);
817 tcg_gen_xori_tl(dst, dst, 0x1);
820 // 0 or 1: !FCC1
821 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
822 unsigned int fcc_offset)
824 gen_mov_reg_FCC1(dst, src, fcc_offset);
825 tcg_gen_xori_tl(dst, dst, 0x1);
828 // !2: !(!FCC0 & FCC1)
829 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
830 unsigned int fcc_offset)
832 gen_mov_reg_FCC0(dst, src, fcc_offset);
833 tcg_gen_xori_tl(dst, dst, 0x1);
834 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
835 tcg_gen_and_tl(dst, dst, cpu_tmp0);
836 tcg_gen_xori_tl(dst, dst, 0x1);
839 // !3: !(FCC0 & FCC1)
840 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
841 unsigned int fcc_offset)
843 gen_mov_reg_FCC0(dst, src, fcc_offset);
844 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
845 tcg_gen_and_tl(dst, dst, cpu_tmp0);
846 tcg_gen_xori_tl(dst, dst, 0x1);
849 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
850 target_ulong pc2, TCGv r_cond)
852 int l1;
854 l1 = gen_new_label();
856 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
858 gen_goto_tb(dc, 0, pc1, pc1 + 4);
860 gen_set_label(l1);
861 gen_goto_tb(dc, 1, pc2, pc2 + 4);
864 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
865 target_ulong pc2, TCGv r_cond)
867 int l1;
869 l1 = gen_new_label();
871 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
873 gen_goto_tb(dc, 0, pc2, pc1);
875 gen_set_label(l1);
876 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
879 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
880 TCGv r_cond)
882 int l1, l2;
884 l1 = gen_new_label();
885 l2 = gen_new_label();
887 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
889 tcg_gen_movi_tl(cpu_npc, npc1);
890 tcg_gen_br(l2);
892 gen_set_label(l1);
893 tcg_gen_movi_tl(cpu_npc, npc2);
894 gen_set_label(l2);
897 /* call this function before using the condition register as it may
898 have been set for a jump */
899 static inline void flush_cond(DisasContext *dc, TCGv cond)
901 if (dc->npc == JUMP_PC) {
902 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
903 dc->npc = DYNAMIC_PC;
907 static inline void save_npc(DisasContext *dc, TCGv cond)
909 if (dc->npc == JUMP_PC) {
910 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
911 dc->npc = DYNAMIC_PC;
912 } else if (dc->npc != DYNAMIC_PC) {
913 tcg_gen_movi_tl(cpu_npc, dc->npc);
917 static inline void save_state(DisasContext *dc, TCGv cond)
919 tcg_gen_movi_tl(cpu_pc, dc->pc);
920 /* flush pending conditional evaluations before exposing cpu state */
921 if (dc->cc_op != CC_OP_FLAGS) {
922 dc->cc_op = CC_OP_FLAGS;
923 gen_helper_compute_psr();
925 save_npc(dc, cond);
928 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
930 if (dc->npc == JUMP_PC) {
931 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
932 tcg_gen_mov_tl(cpu_pc, cpu_npc);
933 dc->pc = DYNAMIC_PC;
934 } else if (dc->npc == DYNAMIC_PC) {
935 tcg_gen_mov_tl(cpu_pc, cpu_npc);
936 dc->pc = DYNAMIC_PC;
937 } else {
938 dc->pc = dc->npc;
942 static inline void gen_op_next_insn(void)
944 tcg_gen_mov_tl(cpu_pc, cpu_npc);
945 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
948 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
949 DisasContext *dc)
951 TCGv_i32 r_src;
953 #ifdef TARGET_SPARC64
954 if (cc)
955 r_src = cpu_xcc;
956 else
957 r_src = cpu_psr;
958 #else
959 r_src = cpu_psr;
960 #endif
961 switch (dc->cc_op) {
962 case CC_OP_FLAGS:
963 break;
964 default:
965 gen_helper_compute_psr();
966 dc->cc_op = CC_OP_FLAGS;
967 break;
969 switch (cond) {
970 case 0x0:
971 gen_op_eval_bn(r_dst);
972 break;
973 case 0x1:
974 gen_op_eval_be(r_dst, r_src);
975 break;
976 case 0x2:
977 gen_op_eval_ble(r_dst, r_src);
978 break;
979 case 0x3:
980 gen_op_eval_bl(r_dst, r_src);
981 break;
982 case 0x4:
983 gen_op_eval_bleu(r_dst, r_src);
984 break;
985 case 0x5:
986 gen_op_eval_bcs(r_dst, r_src);
987 break;
988 case 0x6:
989 gen_op_eval_bneg(r_dst, r_src);
990 break;
991 case 0x7:
992 gen_op_eval_bvs(r_dst, r_src);
993 break;
994 case 0x8:
995 gen_op_eval_ba(r_dst);
996 break;
997 case 0x9:
998 gen_op_eval_bne(r_dst, r_src);
999 break;
1000 case 0xa:
1001 gen_op_eval_bg(r_dst, r_src);
1002 break;
1003 case 0xb:
1004 gen_op_eval_bge(r_dst, r_src);
1005 break;
1006 case 0xc:
1007 gen_op_eval_bgu(r_dst, r_src);
1008 break;
1009 case 0xd:
1010 gen_op_eval_bcc(r_dst, r_src);
1011 break;
1012 case 0xe:
1013 gen_op_eval_bpos(r_dst, r_src);
1014 break;
1015 case 0xf:
1016 gen_op_eval_bvc(r_dst, r_src);
1017 break;
1021 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1023 unsigned int offset;
1025 switch (cc) {
1026 default:
1027 case 0x0:
1028 offset = 0;
1029 break;
1030 case 0x1:
1031 offset = 32 - 10;
1032 break;
1033 case 0x2:
1034 offset = 34 - 10;
1035 break;
1036 case 0x3:
1037 offset = 36 - 10;
1038 break;
1041 switch (cond) {
1042 case 0x0:
1043 gen_op_eval_bn(r_dst);
1044 break;
1045 case 0x1:
1046 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1047 break;
1048 case 0x2:
1049 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1050 break;
1051 case 0x3:
1052 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1053 break;
1054 case 0x4:
1055 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1056 break;
1057 case 0x5:
1058 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1059 break;
1060 case 0x6:
1061 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1062 break;
1063 case 0x7:
1064 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1065 break;
1066 case 0x8:
1067 gen_op_eval_ba(r_dst);
1068 break;
1069 case 0x9:
1070 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1071 break;
1072 case 0xa:
1073 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1074 break;
1075 case 0xb:
1076 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1077 break;
1078 case 0xc:
1079 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1080 break;
1081 case 0xd:
1082 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1083 break;
1084 case 0xe:
1085 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1086 break;
1087 case 0xf:
1088 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1089 break;
#ifdef TARGET_SPARC64
// Inverted logic
/* Maps the 3-bit rcond encoding to the TCG condition under which the
   register test fails.  NOTE(review): the scraped text showed only six
   initializers for this [8] array; the reserved encodings 0 and 4 are
   -1 placeholders upstream — without them every entry after index 0
   would map to the wrong rcond. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* r_dst <- (r_src satisfies rcond `cond`) ? 1 : 0 */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1118 /* XXX: potentially incorrect if dynamic npc */
1119 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1120 TCGv r_cond)
1122 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1123 target_ulong target = dc->pc + offset;
1125 if (cond == 0x0) {
1126 /* unconditional not taken */
1127 if (a) {
1128 dc->pc = dc->npc + 4;
1129 dc->npc = dc->pc + 4;
1130 } else {
1131 dc->pc = dc->npc;
1132 dc->npc = dc->pc + 4;
1134 } else if (cond == 0x8) {
1135 /* unconditional taken */
1136 if (a) {
1137 dc->pc = target;
1138 dc->npc = dc->pc + 4;
1139 } else {
1140 dc->pc = dc->npc;
1141 dc->npc = target;
1142 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1144 } else {
1145 flush_cond(dc, r_cond);
1146 gen_cond(r_cond, cc, cond, dc);
1147 if (a) {
1148 gen_branch_a(dc, target, dc->npc, r_cond);
1149 dc->is_br = 1;
1150 } else {
1151 dc->pc = dc->npc;
1152 dc->jump_pc[0] = target;
1153 dc->jump_pc[1] = dc->npc + 4;
1154 dc->npc = JUMP_PC;
1159 /* XXX: potentially incorrect if dynamic npc */
1160 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1161 TCGv r_cond)
1163 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1164 target_ulong target = dc->pc + offset;
1166 if (cond == 0x0) {
1167 /* unconditional not taken */
1168 if (a) {
1169 dc->pc = dc->npc + 4;
1170 dc->npc = dc->pc + 4;
1171 } else {
1172 dc->pc = dc->npc;
1173 dc->npc = dc->pc + 4;
1175 } else if (cond == 0x8) {
1176 /* unconditional taken */
1177 if (a) {
1178 dc->pc = target;
1179 dc->npc = dc->pc + 4;
1180 } else {
1181 dc->pc = dc->npc;
1182 dc->npc = target;
1183 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1185 } else {
1186 flush_cond(dc, r_cond);
1187 gen_fcond(r_cond, cc, cond);
1188 if (a) {
1189 gen_branch_a(dc, target, dc->npc, r_cond);
1190 dc->is_br = 1;
1191 } else {
1192 dc->pc = dc->npc;
1193 dc->jump_pc[0] = target;
1194 dc->jump_pc[1] = dc->npc + 4;
1195 dc->npc = JUMP_PC;
1200 #ifdef TARGET_SPARC64
1201 /* XXX: potentially incorrect if dynamic npc */
1202 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1203 TCGv r_cond, TCGv r_reg)
1205 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1206 target_ulong target = dc->pc + offset;
1208 flush_cond(dc, r_cond);
1209 gen_cond_reg(r_cond, cond, r_reg);
1210 if (a) {
1211 gen_branch_a(dc, target, dc->npc, r_cond);
1212 dc->is_br = 1;
1213 } else {
1214 dc->pc = dc->npc;
1215 dc->jump_pc[0] = target;
1216 dc->jump_pc[1] = dc->npc + 4;
1217 dc->npc = JUMP_PC;
1221 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1223 switch (fccno) {
1224 case 0:
1225 gen_helper_fcmps(r_rs1, r_rs2);
1226 break;
1227 case 1:
1228 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1229 break;
1230 case 2:
1231 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1232 break;
1233 case 3:
1234 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1235 break;
/* Double-precision compare; operands come from the DT0/DT1 staging
   registers, result goes to the %fcc field selected by fccno. */
static inline void gen_op_fcmpd(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpd();
    } else if (fccno == 1) {
        gen_helper_fcmpd_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpd_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpd_fcc3();
    }
}
/* Quad-precision compare; operands come from the QT0/QT1 staging
   registers, result goes to the %fcc field selected by fccno. */
static inline void gen_op_fcmpq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpq();
    } else if (fccno == 1) {
        gen_helper_fcmpq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpq_fcc3();
    }
}
1275 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1277 switch (fccno) {
1278 case 0:
1279 gen_helper_fcmpes(r_rs1, r_rs2);
1280 break;
1281 case 1:
1282 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1283 break;
1284 case 2:
1285 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1286 break;
1287 case 3:
1288 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1289 break;
/* Double-precision compare-with-exception (fcmped) on DT0/DT1, for the
   %fcc field selected by fccno. */
static inline void gen_op_fcmped(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmped();
    } else if (fccno == 1) {
        gen_helper_fcmped_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmped_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmped_fcc3();
    }
}
/* Quad-precision compare-with-exception (fcmpeq) on QT0/QT1, for the
   %fcc field selected by fccno. */
static inline void gen_op_fcmpeq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpeq();
    } else if (fccno == 1) {
        gen_helper_fcmpeq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpeq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpeq_fcc3();
    }
}
1329 #else
1331 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1333 gen_helper_fcmps(r_rs1, r_rs2);
/* Pre-V9 double compare; single %fcc, fccno ignored. */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* Pre-V9 quad compare; single %fcc, fccno ignored. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1346 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1348 gen_helper_fcmpes(r_rs1, r_rs2);
/* Pre-V9 double compare-with-exception; single %fcc, fccno ignored. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* Pre-V9 quad compare-with-exception; single %fcc, fccno ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1360 #endif
1362 static inline void gen_op_fpexception_im(int fsr_flags)
1364 TCGv_i32 r_const;
1366 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1367 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1368 r_const = tcg_const_i32(TT_FP_EXCP);
1369 gen_helper_raise_exception(r_const);
1370 tcg_temp_free_i32(r_const);
1373 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1375 #if !defined(CONFIG_USER_ONLY)
1376 if (!dc->fpu_enabled) {
1377 TCGv_i32 r_const;
1379 save_state(dc, r_cond);
1380 r_const = tcg_const_i32(TT_NFPU_INSN);
1381 gen_helper_raise_exception(r_const);
1382 tcg_temp_free_i32(r_const);
1383 dc->is_br = 1;
1384 return 1;
1386 #endif
1387 return 0;
1390 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1392 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat accumulated exception flags before an FP op. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1400 /* asi moves */
1401 #ifdef TARGET_SPARC64
1402 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1404 int asi;
1405 TCGv_i32 r_asi;
1407 if (IS_IMM) {
1408 r_asi = tcg_temp_new_i32();
1409 tcg_gen_mov_i32(r_asi, cpu_asi);
1410 } else {
1411 asi = GET_FIELD(insn, 19, 26);
1412 r_asi = tcg_const_i32(asi);
1414 return r_asi;
1417 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1418 int sign)
1420 TCGv_i32 r_asi, r_size, r_sign;
1422 r_asi = gen_get_asi(insn, addr);
1423 r_size = tcg_const_i32(size);
1424 r_sign = tcg_const_i32(sign);
1425 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1426 tcg_temp_free_i32(r_sign);
1427 tcg_temp_free_i32(r_size);
1428 tcg_temp_free_i32(r_asi);
1431 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1433 TCGv_i32 r_asi, r_size;
1435 r_asi = gen_get_asi(insn, addr);
1436 r_size = tcg_const_i32(size);
1437 gen_helper_st_asi(addr, src, r_asi, r_size);
1438 tcg_temp_free_i32(r_size);
1439 tcg_temp_free_i32(r_asi);
1442 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1444 TCGv_i32 r_asi, r_size, r_rd;
1446 r_asi = gen_get_asi(insn, addr);
1447 r_size = tcg_const_i32(size);
1448 r_rd = tcg_const_i32(rd);
1449 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1450 tcg_temp_free_i32(r_rd);
1451 tcg_temp_free_i32(r_size);
1452 tcg_temp_free_i32(r_asi);
1455 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1457 TCGv_i32 r_asi, r_size, r_rd;
1459 r_asi = gen_get_asi(insn, addr);
1460 r_size = tcg_const_i32(size);
1461 r_rd = tcg_const_i32(rd);
1462 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1463 tcg_temp_free_i32(r_rd);
1464 tcg_temp_free_i32(r_size);
1465 tcg_temp_free_i32(r_asi);
1468 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1470 TCGv_i32 r_asi, r_size, r_sign;
1472 r_asi = gen_get_asi(insn, addr);
1473 r_size = tcg_const_i32(4);
1474 r_sign = tcg_const_i32(0);
1475 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1476 tcg_temp_free_i32(r_sign);
1477 gen_helper_st_asi(addr, dst, r_asi, r_size);
1478 tcg_temp_free_i32(r_size);
1479 tcg_temp_free_i32(r_asi);
1480 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1483 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1485 TCGv_i32 r_asi, r_rd;
1487 r_asi = gen_get_asi(insn, addr);
1488 r_rd = tcg_const_i32(rd);
1489 gen_helper_ldda_asi(addr, r_asi, r_rd);
1490 tcg_temp_free_i32(r_rd);
1491 tcg_temp_free_i32(r_asi);
1494 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1496 TCGv_i32 r_asi, r_size;
1498 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1499 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1500 r_asi = gen_get_asi(insn, addr);
1501 r_size = tcg_const_i32(8);
1502 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1503 tcg_temp_free_i32(r_size);
1504 tcg_temp_free_i32(r_asi);
1507 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1508 int rd)
1510 TCGv r_val1;
1511 TCGv_i32 r_asi;
1513 r_val1 = tcg_temp_new();
1514 gen_movl_reg_TN(rd, r_val1);
1515 r_asi = gen_get_asi(insn, addr);
1516 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1517 tcg_temp_free_i32(r_asi);
1518 tcg_temp_free(r_val1);
1521 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1522 int rd)
1524 TCGv_i32 r_asi;
1526 gen_movl_reg_TN(rd, cpu_tmp64);
1527 r_asi = gen_get_asi(insn, addr);
1528 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1529 tcg_temp_free_i32(r_asi);
1532 #elif !defined(CONFIG_USER_ONLY)
1534 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1535 int sign)
1537 TCGv_i32 r_asi, r_size, r_sign;
1539 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1540 r_size = tcg_const_i32(size);
1541 r_sign = tcg_const_i32(sign);
1542 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1543 tcg_temp_free(r_sign);
1544 tcg_temp_free(r_size);
1545 tcg_temp_free(r_asi);
1546 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1549 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1551 TCGv_i32 r_asi, r_size;
1553 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1554 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1555 r_size = tcg_const_i32(size);
1556 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1557 tcg_temp_free(r_size);
1558 tcg_temp_free(r_asi);
1561 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1563 TCGv_i32 r_asi, r_size, r_sign;
1564 TCGv_i64 r_val;
1566 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1567 r_size = tcg_const_i32(4);
1568 r_sign = tcg_const_i32(0);
1569 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1570 tcg_temp_free(r_sign);
1571 r_val = tcg_temp_new_i64();
1572 tcg_gen_extu_tl_i64(r_val, dst);
1573 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1574 tcg_temp_free_i64(r_val);
1575 tcg_temp_free(r_size);
1576 tcg_temp_free(r_asi);
1577 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1580 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1582 TCGv_i32 r_asi, r_size, r_sign;
1584 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1585 r_size = tcg_const_i32(8);
1586 r_sign = tcg_const_i32(0);
1587 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1588 tcg_temp_free(r_sign);
1589 tcg_temp_free(r_size);
1590 tcg_temp_free(r_asi);
1591 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1592 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1593 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1594 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1595 gen_movl_TN_reg(rd, hi);
1598 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1600 TCGv_i32 r_asi, r_size;
1602 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1603 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1604 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1605 r_size = tcg_const_i32(8);
1606 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1607 tcg_temp_free(r_size);
1608 tcg_temp_free(r_asi);
1610 #endif
1612 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1613 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1615 TCGv_i64 r_val;
1616 TCGv_i32 r_asi, r_size;
1618 gen_ld_asi(dst, addr, insn, 1, 0);
1620 r_val = tcg_const_i64(0xffULL);
1621 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1622 r_size = tcg_const_i32(1);
1623 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1624 tcg_temp_free_i32(r_size);
1625 tcg_temp_free_i32(r_asi);
1626 tcg_temp_free_i64(r_val);
1628 #endif
1630 static inline TCGv get_src1(unsigned int insn, TCGv def)
1632 TCGv r_rs1 = def;
1633 unsigned int rs1;
1635 rs1 = GET_FIELD(insn, 13, 17);
1636 if (rs1 == 0) {
1637 tcg_gen_movi_tl(def, 0);
1638 } else if (rs1 < 8) {
1639 r_rs1 = cpu_gregs[rs1];
1640 } else {
1641 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1643 return r_rs1;
1646 static inline TCGv get_src2(unsigned int insn, TCGv def)
1648 TCGv r_rs2 = def;
1650 if (IS_IMM) { /* immediate */
1651 target_long simm = GET_FIELDs(insn, 19, 31);
1652 tcg_gen_movi_tl(def, simm);
1653 } else { /* register */
1654 unsigned int rs2 = GET_FIELD(insn, 27, 31);
1655 if (rs2 == 0) {
1656 tcg_gen_movi_tl(def, 0);
1657 } else if (rs2 < 8) {
1658 r_rs2 = cpu_gregs[rs2];
1659 } else {
1660 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1663 return r_rs2;
1666 #ifdef TARGET_SPARC64
1667 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1669 TCGv_i32 r_tl = tcg_temp_new_i32();
1671 /* load env->tl into r_tl */
1672 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1674 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1675 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1677 /* calculate offset to current trap state from env->ts, reuse r_tl */
1678 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1679 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1681 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1683 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1684 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1685 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1686 tcg_temp_free_ptr(r_tl_tmp);
1689 tcg_temp_free_i32(r_tl);
1691 #endif
/* Bail out to illegal_insn unless the CPU model implements the given
   IU feature.  Wrapped in do/while (0) so the macro behaves as a
   single statement and cannot capture an else at the call site. */
#define CHECK_IU_FEATURE(dc, FEATURE)                              \
    do {                                                           \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))      \
            goto illegal_insn;                                     \
    } while (0)
/* Same, but for FPU features: jump to the no-FPU trap path instead. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                             \
    do {                                                           \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))      \
            goto nfpu_insn;                                        \
    } while (0)
1700 /* before an instruction, dc->pc must be static */
1701 static void disas_sparc_insn(DisasContext * dc)
1703 unsigned int insn, opc, rs1, rs2, rd;
1704 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1705 target_long simm;
1707 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1708 tcg_gen_debug_insn_start(dc->pc);
1709 insn = ldl_code(dc->pc);
1710 opc = GET_FIELD(insn, 0, 1);
1712 rd = GET_FIELD(insn, 2, 6);
1714 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1715 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1717 switch (opc) {
1718 case 0: /* branches/sethi */
1720 unsigned int xop = GET_FIELD(insn, 7, 9);
1721 int32_t target;
1722 switch (xop) {
1723 #ifdef TARGET_SPARC64
1724 case 0x1: /* V9 BPcc */
1726 int cc;
1728 target = GET_FIELD_SP(insn, 0, 18);
1729 target = sign_extend(target, 18);
1730 target <<= 2;
1731 cc = GET_FIELD_SP(insn, 20, 21);
1732 if (cc == 0)
1733 do_branch(dc, target, insn, 0, cpu_cond);
1734 else if (cc == 2)
1735 do_branch(dc, target, insn, 1, cpu_cond);
1736 else
1737 goto illegal_insn;
1738 goto jmp_insn;
1740 case 0x3: /* V9 BPr */
1742 target = GET_FIELD_SP(insn, 0, 13) |
1743 (GET_FIELD_SP(insn, 20, 21) << 14);
1744 target = sign_extend(target, 16);
1745 target <<= 2;
1746 cpu_src1 = get_src1(insn, cpu_src1);
1747 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1748 goto jmp_insn;
1750 case 0x5: /* V9 FBPcc */
1752 int cc = GET_FIELD_SP(insn, 20, 21);
1753 if (gen_trap_ifnofpu(dc, cpu_cond))
1754 goto jmp_insn;
1755 target = GET_FIELD_SP(insn, 0, 18);
1756 target = sign_extend(target, 19);
1757 target <<= 2;
1758 do_fbranch(dc, target, insn, cc, cpu_cond);
1759 goto jmp_insn;
1761 #else
1762 case 0x7: /* CBN+x */
1764 goto ncp_insn;
1766 #endif
1767 case 0x2: /* BN+x */
1769 target = GET_FIELD(insn, 10, 31);
1770 target = sign_extend(target, 22);
1771 target <<= 2;
1772 do_branch(dc, target, insn, 0, cpu_cond);
1773 goto jmp_insn;
1775 case 0x6: /* FBN+x */
1777 if (gen_trap_ifnofpu(dc, cpu_cond))
1778 goto jmp_insn;
1779 target = GET_FIELD(insn, 10, 31);
1780 target = sign_extend(target, 22);
1781 target <<= 2;
1782 do_fbranch(dc, target, insn, 0, cpu_cond);
1783 goto jmp_insn;
1785 case 0x4: /* SETHI */
1786 if (rd) { // nop
1787 uint32_t value = GET_FIELD(insn, 10, 31);
1788 TCGv r_const;
1790 r_const = tcg_const_tl(value << 10);
1791 gen_movl_TN_reg(rd, r_const);
1792 tcg_temp_free(r_const);
1794 break;
1795 case 0x0: /* UNIMPL */
1796 default:
1797 goto illegal_insn;
1799 break;
1801 break;
1802 case 1: /*CALL*/
1804 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1805 TCGv r_const;
1807 r_const = tcg_const_tl(dc->pc);
1808 gen_movl_TN_reg(15, r_const);
1809 tcg_temp_free(r_const);
1810 target += dc->pc;
1811 gen_mov_pc_npc(dc, cpu_cond);
1812 dc->npc = target;
1814 goto jmp_insn;
1815 case 2: /* FPU & Logical Operations */
1817 unsigned int xop = GET_FIELD(insn, 7, 12);
1818 if (xop == 0x3a) { /* generate trap */
1819 int cond;
1821 cpu_src1 = get_src1(insn, cpu_src1);
1822 if (IS_IMM) {
1823 rs2 = GET_FIELD(insn, 25, 31);
1824 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1825 } else {
1826 rs2 = GET_FIELD(insn, 27, 31);
1827 if (rs2 != 0) {
1828 gen_movl_reg_TN(rs2, cpu_src2);
1829 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1830 } else
1831 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1833 cond = GET_FIELD(insn, 3, 6);
1834 if (cond == 0x8) {
1835 save_state(dc, cpu_cond);
1836 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1837 supervisor(dc))
1838 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1839 else
1840 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1841 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1842 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1843 gen_helper_raise_exception(cpu_tmp32);
1844 } else if (cond != 0) {
1845 TCGv r_cond = tcg_temp_new();
1846 int l1;
1847 #ifdef TARGET_SPARC64
1848 /* V9 icc/xcc */
1849 int cc = GET_FIELD_SP(insn, 11, 12);
1851 save_state(dc, cpu_cond);
1852 if (cc == 0)
1853 gen_cond(r_cond, 0, cond, dc);
1854 else if (cc == 2)
1855 gen_cond(r_cond, 1, cond, dc);
1856 else
1857 goto illegal_insn;
1858 #else
1859 save_state(dc, cpu_cond);
1860 gen_cond(r_cond, 0, cond, dc);
1861 #endif
1862 l1 = gen_new_label();
1863 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1865 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1866 supervisor(dc))
1867 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1868 else
1869 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1870 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1871 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1872 gen_helper_raise_exception(cpu_tmp32);
1874 gen_set_label(l1);
1875 tcg_temp_free(r_cond);
1877 gen_op_next_insn();
1878 tcg_gen_exit_tb(0);
1879 dc->is_br = 1;
1880 goto jmp_insn;
1881 } else if (xop == 0x28) {
1882 rs1 = GET_FIELD(insn, 13, 17);
1883 switch(rs1) {
1884 case 0: /* rdy */
1885 #ifndef TARGET_SPARC64
1886 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1887 manual, rdy on the microSPARC
1888 II */
1889 case 0x0f: /* stbar in the SPARCv8 manual,
1890 rdy on the microSPARC II */
1891 case 0x10 ... 0x1f: /* implementation-dependent in the
1892 SPARCv8 manual, rdy on the
1893 microSPARC II */
1894 #endif
1895 gen_movl_TN_reg(rd, cpu_y);
1896 break;
1897 #ifdef TARGET_SPARC64
1898 case 0x2: /* V9 rdccr */
1899 gen_helper_compute_psr();
1900 gen_helper_rdccr(cpu_dst);
1901 gen_movl_TN_reg(rd, cpu_dst);
1902 break;
1903 case 0x3: /* V9 rdasi */
1904 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1905 gen_movl_TN_reg(rd, cpu_dst);
1906 break;
1907 case 0x4: /* V9 rdtick */
1909 TCGv_ptr r_tickptr;
1911 r_tickptr = tcg_temp_new_ptr();
1912 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1913 offsetof(CPUState, tick));
1914 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1915 tcg_temp_free_ptr(r_tickptr);
1916 gen_movl_TN_reg(rd, cpu_dst);
1918 break;
1919 case 0x5: /* V9 rdpc */
1921 TCGv r_const;
1923 r_const = tcg_const_tl(dc->pc);
1924 gen_movl_TN_reg(rd, r_const);
1925 tcg_temp_free(r_const);
1927 break;
1928 case 0x6: /* V9 rdfprs */
1929 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1930 gen_movl_TN_reg(rd, cpu_dst);
1931 break;
1932 case 0xf: /* V9 membar */
1933 break; /* no effect */
1934 case 0x13: /* Graphics Status */
1935 if (gen_trap_ifnofpu(dc, cpu_cond))
1936 goto jmp_insn;
1937 gen_movl_TN_reg(rd, cpu_gsr);
1938 break;
1939 case 0x16: /* Softint */
1940 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1941 gen_movl_TN_reg(rd, cpu_dst);
1942 break;
1943 case 0x17: /* Tick compare */
1944 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1945 break;
1946 case 0x18: /* System tick */
1948 TCGv_ptr r_tickptr;
1950 r_tickptr = tcg_temp_new_ptr();
1951 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1952 offsetof(CPUState, stick));
1953 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1954 tcg_temp_free_ptr(r_tickptr);
1955 gen_movl_TN_reg(rd, cpu_dst);
1957 break;
1958 case 0x19: /* System tick compare */
1959 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1960 break;
1961 case 0x10: /* Performance Control */
1962 case 0x11: /* Performance Instrumentation Counter */
1963 case 0x12: /* Dispatch Control */
1964 case 0x14: /* Softint set, WO */
1965 case 0x15: /* Softint clear, WO */
1966 #endif
1967 default:
1968 goto illegal_insn;
1970 #if !defined(CONFIG_USER_ONLY)
1971 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1972 #ifndef TARGET_SPARC64
1973 if (!supervisor(dc))
1974 goto priv_insn;
1975 gen_helper_compute_psr();
1976 dc->cc_op = CC_OP_FLAGS;
1977 gen_helper_rdpsr(cpu_dst);
1978 #else
1979 CHECK_IU_FEATURE(dc, HYPV);
1980 if (!hypervisor(dc))
1981 goto priv_insn;
1982 rs1 = GET_FIELD(insn, 13, 17);
1983 switch (rs1) {
1984 case 0: // hpstate
1985 // gen_op_rdhpstate();
1986 break;
1987 case 1: // htstate
1988 // gen_op_rdhtstate();
1989 break;
1990 case 3: // hintp
1991 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1992 break;
1993 case 5: // htba
1994 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1995 break;
1996 case 6: // hver
1997 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1998 break;
1999 case 31: // hstick_cmpr
2000 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2001 break;
2002 default:
2003 goto illegal_insn;
2005 #endif
2006 gen_movl_TN_reg(rd, cpu_dst);
2007 break;
2008 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2009 if (!supervisor(dc))
2010 goto priv_insn;
2011 #ifdef TARGET_SPARC64
2012 rs1 = GET_FIELD(insn, 13, 17);
2013 switch (rs1) {
2014 case 0: // tpc
2016 TCGv_ptr r_tsptr;
2018 r_tsptr = tcg_temp_new_ptr();
2019 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2020 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2021 offsetof(trap_state, tpc));
2022 tcg_temp_free_ptr(r_tsptr);
2024 break;
2025 case 1: // tnpc
2027 TCGv_ptr r_tsptr;
2029 r_tsptr = tcg_temp_new_ptr();
2030 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2031 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2032 offsetof(trap_state, tnpc));
2033 tcg_temp_free_ptr(r_tsptr);
2035 break;
2036 case 2: // tstate
2038 TCGv_ptr r_tsptr;
2040 r_tsptr = tcg_temp_new_ptr();
2041 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2042 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2043 offsetof(trap_state, tstate));
2044 tcg_temp_free_ptr(r_tsptr);
2046 break;
2047 case 3: // tt
2049 TCGv_ptr r_tsptr;
2051 r_tsptr = tcg_temp_new_ptr();
2052 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2053 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2054 offsetof(trap_state, tt));
2055 tcg_temp_free_ptr(r_tsptr);
2056 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2058 break;
2059 case 4: // tick
2061 TCGv_ptr r_tickptr;
2063 r_tickptr = tcg_temp_new_ptr();
2064 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2065 offsetof(CPUState, tick));
2066 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2067 gen_movl_TN_reg(rd, cpu_tmp0);
2068 tcg_temp_free_ptr(r_tickptr);
2070 break;
2071 case 5: // tba
2072 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2073 break;
2074 case 6: // pstate
2075 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2076 offsetof(CPUSPARCState, pstate));
2077 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2078 break;
2079 case 7: // tl
2080 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2081 offsetof(CPUSPARCState, tl));
2082 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2083 break;
2084 case 8: // pil
2085 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2086 offsetof(CPUSPARCState, psrpil));
2087 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2088 break;
2089 case 9: // cwp
2090 gen_helper_rdcwp(cpu_tmp0);
2091 break;
2092 case 10: // cansave
2093 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2094 offsetof(CPUSPARCState, cansave));
2095 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2096 break;
2097 case 11: // canrestore
2098 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2099 offsetof(CPUSPARCState, canrestore));
2100 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2101 break;
2102 case 12: // cleanwin
2103 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2104 offsetof(CPUSPARCState, cleanwin));
2105 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2106 break;
2107 case 13: // otherwin
2108 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2109 offsetof(CPUSPARCState, otherwin));
2110 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2111 break;
2112 case 14: // wstate
2113 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2114 offsetof(CPUSPARCState, wstate));
2115 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2116 break;
2117 case 16: // UA2005 gl
2118 CHECK_IU_FEATURE(dc, GL);
2119 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2120 offsetof(CPUSPARCState, gl));
2121 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2122 break;
2123 case 26: // UA2005 strand status
2124 CHECK_IU_FEATURE(dc, HYPV);
2125 if (!hypervisor(dc))
2126 goto priv_insn;
2127 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2128 break;
2129 case 31: // ver
2130 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2131 break;
2132 case 15: // fq
2133 default:
2134 goto illegal_insn;
2136 #else
2137 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2138 #endif
2139 gen_movl_TN_reg(rd, cpu_tmp0);
2140 break;
2141 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2142 #ifdef TARGET_SPARC64
2143 save_state(dc, cpu_cond);
2144 gen_helper_flushw();
2145 #else
2146 if (!supervisor(dc))
2147 goto priv_insn;
2148 gen_movl_TN_reg(rd, cpu_tbr);
2149 #endif
2150 break;
2151 #endif
2152 } else if (xop == 0x34) { /* FPU Operations */
2153 if (gen_trap_ifnofpu(dc, cpu_cond))
2154 goto jmp_insn;
2155 gen_op_clear_ieee_excp_and_FTT();
2156 rs1 = GET_FIELD(insn, 13, 17);
2157 rs2 = GET_FIELD(insn, 27, 31);
2158 xop = GET_FIELD(insn, 18, 26);
2159 save_state(dc, cpu_cond);
2160 switch (xop) {
2161 case 0x1: /* fmovs */
2162 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2163 break;
2164 case 0x5: /* fnegs */
2165 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2166 break;
2167 case 0x9: /* fabss */
2168 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2169 break;
2170 case 0x29: /* fsqrts */
2171 CHECK_FPU_FEATURE(dc, FSQRT);
2172 gen_clear_float_exceptions();
2173 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2174 gen_helper_check_ieee_exceptions();
2175 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2176 break;
2177 case 0x2a: /* fsqrtd */
2178 CHECK_FPU_FEATURE(dc, FSQRT);
2179 gen_op_load_fpr_DT1(DFPREG(rs2));
2180 gen_clear_float_exceptions();
2181 gen_helper_fsqrtd();
2182 gen_helper_check_ieee_exceptions();
2183 gen_op_store_DT0_fpr(DFPREG(rd));
2184 break;
2185 case 0x2b: /* fsqrtq */
2186 CHECK_FPU_FEATURE(dc, FLOAT128);
2187 gen_op_load_fpr_QT1(QFPREG(rs2));
2188 gen_clear_float_exceptions();
2189 gen_helper_fsqrtq();
2190 gen_helper_check_ieee_exceptions();
2191 gen_op_store_QT0_fpr(QFPREG(rd));
2192 break;
2193 case 0x41: /* fadds */
2194 gen_clear_float_exceptions();
2195 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2196 gen_helper_check_ieee_exceptions();
2197 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2198 break;
2199 case 0x42: /* faddd */
2200 gen_op_load_fpr_DT0(DFPREG(rs1));
2201 gen_op_load_fpr_DT1(DFPREG(rs2));
2202 gen_clear_float_exceptions();
2203 gen_helper_faddd();
2204 gen_helper_check_ieee_exceptions();
2205 gen_op_store_DT0_fpr(DFPREG(rd));
2206 break;
2207 case 0x43: /* faddq */
2208 CHECK_FPU_FEATURE(dc, FLOAT128);
2209 gen_op_load_fpr_QT0(QFPREG(rs1));
2210 gen_op_load_fpr_QT1(QFPREG(rs2));
2211 gen_clear_float_exceptions();
2212 gen_helper_faddq();
2213 gen_helper_check_ieee_exceptions();
2214 gen_op_store_QT0_fpr(QFPREG(rd));
2215 break;
2216 case 0x45: /* fsubs */
2217 gen_clear_float_exceptions();
2218 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2219 gen_helper_check_ieee_exceptions();
2220 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2221 break;
2222 case 0x46: /* fsubd */
2223 gen_op_load_fpr_DT0(DFPREG(rs1));
2224 gen_op_load_fpr_DT1(DFPREG(rs2));
2225 gen_clear_float_exceptions();
2226 gen_helper_fsubd();
2227 gen_helper_check_ieee_exceptions();
2228 gen_op_store_DT0_fpr(DFPREG(rd));
2229 break;
2230 case 0x47: /* fsubq */
2231 CHECK_FPU_FEATURE(dc, FLOAT128);
2232 gen_op_load_fpr_QT0(QFPREG(rs1));
2233 gen_op_load_fpr_QT1(QFPREG(rs2));
2234 gen_clear_float_exceptions();
2235 gen_helper_fsubq();
2236 gen_helper_check_ieee_exceptions();
2237 gen_op_store_QT0_fpr(QFPREG(rd));
2238 break;
2239 case 0x49: /* fmuls */
2240 CHECK_FPU_FEATURE(dc, FMUL);
2241 gen_clear_float_exceptions();
2242 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2243 gen_helper_check_ieee_exceptions();
2244 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2245 break;
2246 case 0x4a: /* fmuld */
2247 CHECK_FPU_FEATURE(dc, FMUL);
2248 gen_op_load_fpr_DT0(DFPREG(rs1));
2249 gen_op_load_fpr_DT1(DFPREG(rs2));
2250 gen_clear_float_exceptions();
2251 gen_helper_fmuld();
2252 gen_helper_check_ieee_exceptions();
2253 gen_op_store_DT0_fpr(DFPREG(rd));
2254 break;
2255 case 0x4b: /* fmulq */
2256 CHECK_FPU_FEATURE(dc, FLOAT128);
2257 CHECK_FPU_FEATURE(dc, FMUL);
2258 gen_op_load_fpr_QT0(QFPREG(rs1));
2259 gen_op_load_fpr_QT1(QFPREG(rs2));
2260 gen_clear_float_exceptions();
2261 gen_helper_fmulq();
2262 gen_helper_check_ieee_exceptions();
2263 gen_op_store_QT0_fpr(QFPREG(rd));
2264 break;
2265 case 0x4d: /* fdivs */
2266 gen_clear_float_exceptions();
2267 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2268 gen_helper_check_ieee_exceptions();
2269 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2270 break;
2271 case 0x4e: /* fdivd */
2272 gen_op_load_fpr_DT0(DFPREG(rs1));
2273 gen_op_load_fpr_DT1(DFPREG(rs2));
2274 gen_clear_float_exceptions();
2275 gen_helper_fdivd();
2276 gen_helper_check_ieee_exceptions();
2277 gen_op_store_DT0_fpr(DFPREG(rd));
2278 break;
2279 case 0x4f: /* fdivq */
2280 CHECK_FPU_FEATURE(dc, FLOAT128);
2281 gen_op_load_fpr_QT0(QFPREG(rs1));
2282 gen_op_load_fpr_QT1(QFPREG(rs2));
2283 gen_clear_float_exceptions();
2284 gen_helper_fdivq();
2285 gen_helper_check_ieee_exceptions();
2286 gen_op_store_QT0_fpr(QFPREG(rd));
2287 break;
2288 case 0x69: /* fsmuld */
2289 CHECK_FPU_FEATURE(dc, FSMULD);
2290 gen_clear_float_exceptions();
2291 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2292 gen_helper_check_ieee_exceptions();
2293 gen_op_store_DT0_fpr(DFPREG(rd));
2294 break;
2295 case 0x6e: /* fdmulq */
2296 CHECK_FPU_FEATURE(dc, FLOAT128);
2297 gen_op_load_fpr_DT0(DFPREG(rs1));
2298 gen_op_load_fpr_DT1(DFPREG(rs2));
2299 gen_clear_float_exceptions();
2300 gen_helper_fdmulq();
2301 gen_helper_check_ieee_exceptions();
2302 gen_op_store_QT0_fpr(QFPREG(rd));
2303 break;
2304 case 0xc4: /* fitos */
2305 gen_clear_float_exceptions();
2306 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2307 gen_helper_check_ieee_exceptions();
2308 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2309 break;
2310 case 0xc6: /* fdtos */
2311 gen_op_load_fpr_DT1(DFPREG(rs2));
2312 gen_clear_float_exceptions();
2313 gen_helper_fdtos(cpu_tmp32);
2314 gen_helper_check_ieee_exceptions();
2315 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2316 break;
2317 case 0xc7: /* fqtos */
2318 CHECK_FPU_FEATURE(dc, FLOAT128);
2319 gen_op_load_fpr_QT1(QFPREG(rs2));
2320 gen_clear_float_exceptions();
2321 gen_helper_fqtos(cpu_tmp32);
2322 gen_helper_check_ieee_exceptions();
2323 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2324 break;
2325 case 0xc8: /* fitod */
2326 gen_helper_fitod(cpu_fpr[rs2]);
2327 gen_op_store_DT0_fpr(DFPREG(rd));
2328 break;
2329 case 0xc9: /* fstod */
2330 gen_helper_fstod(cpu_fpr[rs2]);
2331 gen_op_store_DT0_fpr(DFPREG(rd));
2332 break;
2333 case 0xcb: /* fqtod */
2334 CHECK_FPU_FEATURE(dc, FLOAT128);
2335 gen_op_load_fpr_QT1(QFPREG(rs2));
2336 gen_clear_float_exceptions();
2337 gen_helper_fqtod();
2338 gen_helper_check_ieee_exceptions();
2339 gen_op_store_DT0_fpr(DFPREG(rd));
2340 break;
2341 case 0xcc: /* fitoq */
2342 CHECK_FPU_FEATURE(dc, FLOAT128);
2343 gen_helper_fitoq(cpu_fpr[rs2]);
2344 gen_op_store_QT0_fpr(QFPREG(rd));
2345 break;
2346 case 0xcd: /* fstoq */
2347 CHECK_FPU_FEATURE(dc, FLOAT128);
2348 gen_helper_fstoq(cpu_fpr[rs2]);
2349 gen_op_store_QT0_fpr(QFPREG(rd));
2350 break;
2351 case 0xce: /* fdtoq */
2352 CHECK_FPU_FEATURE(dc, FLOAT128);
2353 gen_op_load_fpr_DT1(DFPREG(rs2));
2354 gen_helper_fdtoq();
2355 gen_op_store_QT0_fpr(QFPREG(rd));
2356 break;
2357 case 0xd1: /* fstoi */
2358 gen_clear_float_exceptions();
2359 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2362 break;
2363 case 0xd2: /* fdtoi */
2364 gen_op_load_fpr_DT1(DFPREG(rs2));
2365 gen_clear_float_exceptions();
2366 gen_helper_fdtoi(cpu_tmp32);
2367 gen_helper_check_ieee_exceptions();
2368 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2369 break;
2370 case 0xd3: /* fqtoi */
2371 CHECK_FPU_FEATURE(dc, FLOAT128);
2372 gen_op_load_fpr_QT1(QFPREG(rs2));
2373 gen_clear_float_exceptions();
2374 gen_helper_fqtoi(cpu_tmp32);
2375 gen_helper_check_ieee_exceptions();
2376 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2377 break;
2378 #ifdef TARGET_SPARC64
2379 case 0x2: /* V9 fmovd */
2380 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2381 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2382 cpu_fpr[DFPREG(rs2) + 1]);
2383 break;
2384 case 0x3: /* V9 fmovq */
2385 CHECK_FPU_FEATURE(dc, FLOAT128);
2386 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2387 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2388 cpu_fpr[QFPREG(rs2) + 1]);
2389 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2390 cpu_fpr[QFPREG(rs2) + 2]);
2391 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2392 cpu_fpr[QFPREG(rs2) + 3]);
2393 break;
2394 case 0x6: /* V9 fnegd */
2395 gen_op_load_fpr_DT1(DFPREG(rs2));
2396 gen_helper_fnegd();
2397 gen_op_store_DT0_fpr(DFPREG(rd));
2398 break;
2399 case 0x7: /* V9 fnegq */
2400 CHECK_FPU_FEATURE(dc, FLOAT128);
2401 gen_op_load_fpr_QT1(QFPREG(rs2));
2402 gen_helper_fnegq();
2403 gen_op_store_QT0_fpr(QFPREG(rd));
2404 break;
2405 case 0xa: /* V9 fabsd */
2406 gen_op_load_fpr_DT1(DFPREG(rs2));
2407 gen_helper_fabsd();
2408 gen_op_store_DT0_fpr(DFPREG(rd));
2409 break;
2410 case 0xb: /* V9 fabsq */
2411 CHECK_FPU_FEATURE(dc, FLOAT128);
2412 gen_op_load_fpr_QT1(QFPREG(rs2));
2413 gen_helper_fabsq();
2414 gen_op_store_QT0_fpr(QFPREG(rd));
2415 break;
2416 case 0x81: /* V9 fstox */
2417 gen_clear_float_exceptions();
2418 gen_helper_fstox(cpu_fpr[rs2]);
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_DT0_fpr(DFPREG(rd));
2421 break;
2422 case 0x82: /* V9 fdtox */
2423 gen_op_load_fpr_DT1(DFPREG(rs2));
2424 gen_clear_float_exceptions();
2425 gen_helper_fdtox();
2426 gen_helper_check_ieee_exceptions();
2427 gen_op_store_DT0_fpr(DFPREG(rd));
2428 break;
2429 case 0x83: /* V9 fqtox */
2430 CHECK_FPU_FEATURE(dc, FLOAT128);
2431 gen_op_load_fpr_QT1(QFPREG(rs2));
2432 gen_clear_float_exceptions();
2433 gen_helper_fqtox();
2434 gen_helper_check_ieee_exceptions();
2435 gen_op_store_DT0_fpr(DFPREG(rd));
2436 break;
2437 case 0x84: /* V9 fxtos */
2438 gen_op_load_fpr_DT1(DFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 gen_helper_fxtos(cpu_tmp32);
2441 gen_helper_check_ieee_exceptions();
2442 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2443 break;
2444 case 0x88: /* V9 fxtod */
2445 gen_op_load_fpr_DT1(DFPREG(rs2));
2446 gen_clear_float_exceptions();
2447 gen_helper_fxtod();
2448 gen_helper_check_ieee_exceptions();
2449 gen_op_store_DT0_fpr(DFPREG(rd));
2450 break;
2451 case 0x8c: /* V9 fxtoq */
2452 CHECK_FPU_FEATURE(dc, FLOAT128);
2453 gen_op_load_fpr_DT1(DFPREG(rs2));
2454 gen_clear_float_exceptions();
2455 gen_helper_fxtoq();
2456 gen_helper_check_ieee_exceptions();
2457 gen_op_store_QT0_fpr(QFPREG(rd));
2458 break;
2459 #endif
2460 default:
2461 goto illegal_insn;
2463 } else if (xop == 0x35) { /* FPU Operations */
2464 #ifdef TARGET_SPARC64
2465 int cond;
2466 #endif
2467 if (gen_trap_ifnofpu(dc, cpu_cond))
2468 goto jmp_insn;
2469 gen_op_clear_ieee_excp_and_FTT();
2470 rs1 = GET_FIELD(insn, 13, 17);
2471 rs2 = GET_FIELD(insn, 27, 31);
2472 xop = GET_FIELD(insn, 18, 26);
2473 save_state(dc, cpu_cond);
2474 #ifdef TARGET_SPARC64
2475 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2476 int l1;
2478 l1 = gen_new_label();
2479 cond = GET_FIELD_SP(insn, 14, 17);
2480 cpu_src1 = get_src1(insn, cpu_src1);
2481 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2482 0, l1);
2483 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2484 gen_set_label(l1);
2485 break;
2486 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2487 int l1;
2489 l1 = gen_new_label();
2490 cond = GET_FIELD_SP(insn, 14, 17);
2491 cpu_src1 = get_src1(insn, cpu_src1);
2492 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2493 0, l1);
2494 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2495 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2496 gen_set_label(l1);
2497 break;
2498 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2499 int l1;
2501 CHECK_FPU_FEATURE(dc, FLOAT128);
2502 l1 = gen_new_label();
2503 cond = GET_FIELD_SP(insn, 14, 17);
2504 cpu_src1 = get_src1(insn, cpu_src1);
2505 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2506 0, l1);
2507 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2508 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2509 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2510 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2511 gen_set_label(l1);
2512 break;
2514 #endif
2515 switch (xop) {
2516 #ifdef TARGET_SPARC64
2517 #define FMOVSCC(fcc) \
2519 TCGv r_cond; \
2520 int l1; \
2522 l1 = gen_new_label(); \
2523 r_cond = tcg_temp_new(); \
2524 cond = GET_FIELD_SP(insn, 14, 17); \
2525 gen_fcond(r_cond, fcc, cond); \
2526 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2527 0, l1); \
2528 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2529 gen_set_label(l1); \
2530 tcg_temp_free(r_cond); \
2532 #define FMOVDCC(fcc) \
2534 TCGv r_cond; \
2535 int l1; \
2537 l1 = gen_new_label(); \
2538 r_cond = tcg_temp_new(); \
2539 cond = GET_FIELD_SP(insn, 14, 17); \
2540 gen_fcond(r_cond, fcc, cond); \
2541 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2542 0, l1); \
2543 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2544 cpu_fpr[DFPREG(rs2)]); \
2545 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2546 cpu_fpr[DFPREG(rs2) + 1]); \
2547 gen_set_label(l1); \
2548 tcg_temp_free(r_cond); \
2550 #define FMOVQCC(fcc) \
2552 TCGv r_cond; \
2553 int l1; \
2555 l1 = gen_new_label(); \
2556 r_cond = tcg_temp_new(); \
2557 cond = GET_FIELD_SP(insn, 14, 17); \
2558 gen_fcond(r_cond, fcc, cond); \
2559 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2560 0, l1); \
2561 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2562 cpu_fpr[QFPREG(rs2)]); \
2563 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2564 cpu_fpr[QFPREG(rs2) + 1]); \
2565 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2566 cpu_fpr[QFPREG(rs2) + 2]); \
2567 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2568 cpu_fpr[QFPREG(rs2) + 3]); \
2569 gen_set_label(l1); \
2570 tcg_temp_free(r_cond); \
2572 case 0x001: /* V9 fmovscc %fcc0 */
2573 FMOVSCC(0);
2574 break;
2575 case 0x002: /* V9 fmovdcc %fcc0 */
2576 FMOVDCC(0);
2577 break;
2578 case 0x003: /* V9 fmovqcc %fcc0 */
2579 CHECK_FPU_FEATURE(dc, FLOAT128);
2580 FMOVQCC(0);
2581 break;
2582 case 0x041: /* V9 fmovscc %fcc1 */
2583 FMOVSCC(1);
2584 break;
2585 case 0x042: /* V9 fmovdcc %fcc1 */
2586 FMOVDCC(1);
2587 break;
2588 case 0x043: /* V9 fmovqcc %fcc1 */
2589 CHECK_FPU_FEATURE(dc, FLOAT128);
2590 FMOVQCC(1);
2591 break;
2592 case 0x081: /* V9 fmovscc %fcc2 */
2593 FMOVSCC(2);
2594 break;
2595 case 0x082: /* V9 fmovdcc %fcc2 */
2596 FMOVDCC(2);
2597 break;
2598 case 0x083: /* V9 fmovqcc %fcc2 */
2599 CHECK_FPU_FEATURE(dc, FLOAT128);
2600 FMOVQCC(2);
2601 break;
2602 case 0x0c1: /* V9 fmovscc %fcc3 */
2603 FMOVSCC(3);
2604 break;
2605 case 0x0c2: /* V9 fmovdcc %fcc3 */
2606 FMOVDCC(3);
2607 break;
2608 case 0x0c3: /* V9 fmovqcc %fcc3 */
2609 CHECK_FPU_FEATURE(dc, FLOAT128);
2610 FMOVQCC(3);
2611 break;
2612 #undef FMOVSCC
2613 #undef FMOVDCC
2614 #undef FMOVQCC
2615 #define FMOVSCC(icc) \
2617 TCGv r_cond; \
2618 int l1; \
2620 l1 = gen_new_label(); \
2621 r_cond = tcg_temp_new(); \
2622 cond = GET_FIELD_SP(insn, 14, 17); \
2623 gen_cond(r_cond, icc, cond, dc); \
2624 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2625 0, l1); \
2626 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2627 gen_set_label(l1); \
2628 tcg_temp_free(r_cond); \
2630 #define FMOVDCC(icc) \
2632 TCGv r_cond; \
2633 int l1; \
2635 l1 = gen_new_label(); \
2636 r_cond = tcg_temp_new(); \
2637 cond = GET_FIELD_SP(insn, 14, 17); \
2638 gen_cond(r_cond, icc, cond, dc); \
2639 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2640 0, l1); \
2641 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2642 cpu_fpr[DFPREG(rs2)]); \
2643 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2644 cpu_fpr[DFPREG(rs2) + 1]); \
2645 gen_set_label(l1); \
2646 tcg_temp_free(r_cond); \
2648 #define FMOVQCC(icc) \
2650 TCGv r_cond; \
2651 int l1; \
2653 l1 = gen_new_label(); \
2654 r_cond = tcg_temp_new(); \
2655 cond = GET_FIELD_SP(insn, 14, 17); \
2656 gen_cond(r_cond, icc, cond, dc); \
2657 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2658 0, l1); \
2659 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2660 cpu_fpr[QFPREG(rs2)]); \
2661 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2662 cpu_fpr[QFPREG(rs2) + 1]); \
2663 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2664 cpu_fpr[QFPREG(rs2) + 2]); \
2665 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2666 cpu_fpr[QFPREG(rs2) + 3]); \
2667 gen_set_label(l1); \
2668 tcg_temp_free(r_cond); \
2671 case 0x101: /* V9 fmovscc %icc */
2672 FMOVSCC(0);
2673 break;
2674 case 0x102: /* V9 fmovdcc %icc */
2675 FMOVDCC(0);
2676 case 0x103: /* V9 fmovqcc %icc */
2677 CHECK_FPU_FEATURE(dc, FLOAT128);
2678 FMOVQCC(0);
2679 break;
2680 case 0x181: /* V9 fmovscc %xcc */
2681 FMOVSCC(1);
2682 break;
2683 case 0x182: /* V9 fmovdcc %xcc */
2684 FMOVDCC(1);
2685 break;
2686 case 0x183: /* V9 fmovqcc %xcc */
2687 CHECK_FPU_FEATURE(dc, FLOAT128);
2688 FMOVQCC(1);
2689 break;
2690 #undef FMOVSCC
2691 #undef FMOVDCC
2692 #undef FMOVQCC
2693 #endif
2694 case 0x51: /* fcmps, V9 %fcc */
2695 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2696 break;
2697 case 0x52: /* fcmpd, V9 %fcc */
2698 gen_op_load_fpr_DT0(DFPREG(rs1));
2699 gen_op_load_fpr_DT1(DFPREG(rs2));
2700 gen_op_fcmpd(rd & 3);
2701 break;
2702 case 0x53: /* fcmpq, V9 %fcc */
2703 CHECK_FPU_FEATURE(dc, FLOAT128);
2704 gen_op_load_fpr_QT0(QFPREG(rs1));
2705 gen_op_load_fpr_QT1(QFPREG(rs2));
2706 gen_op_fcmpq(rd & 3);
2707 break;
2708 case 0x55: /* fcmpes, V9 %fcc */
2709 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2710 break;
2711 case 0x56: /* fcmped, V9 %fcc */
2712 gen_op_load_fpr_DT0(DFPREG(rs1));
2713 gen_op_load_fpr_DT1(DFPREG(rs2));
2714 gen_op_fcmped(rd & 3);
2715 break;
2716 case 0x57: /* fcmpeq, V9 %fcc */
2717 CHECK_FPU_FEATURE(dc, FLOAT128);
2718 gen_op_load_fpr_QT0(QFPREG(rs1));
2719 gen_op_load_fpr_QT1(QFPREG(rs2));
2720 gen_op_fcmpeq(rd & 3);
2721 break;
2722 default:
2723 goto illegal_insn;
2725 } else if (xop == 0x2) {
2726 // clr/mov shortcut
2728 rs1 = GET_FIELD(insn, 13, 17);
2729 if (rs1 == 0) {
2730 // or %g0, x, y -> mov T0, x; mov y, T0
2731 if (IS_IMM) { /* immediate */
2732 TCGv r_const;
2734 simm = GET_FIELDs(insn, 19, 31);
2735 r_const = tcg_const_tl(simm);
2736 gen_movl_TN_reg(rd, r_const);
2737 tcg_temp_free(r_const);
2738 } else { /* register */
2739 rs2 = GET_FIELD(insn, 27, 31);
2740 gen_movl_reg_TN(rs2, cpu_dst);
2741 gen_movl_TN_reg(rd, cpu_dst);
2743 } else {
2744 cpu_src1 = get_src1(insn, cpu_src1);
2745 if (IS_IMM) { /* immediate */
2746 simm = GET_FIELDs(insn, 19, 31);
2747 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2748 gen_movl_TN_reg(rd, cpu_dst);
2749 } else { /* register */
2750 // or x, %g0, y -> mov T1, x; mov y, T1
2751 rs2 = GET_FIELD(insn, 27, 31);
2752 if (rs2 != 0) {
2753 gen_movl_reg_TN(rs2, cpu_src2);
2754 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2755 gen_movl_TN_reg(rd, cpu_dst);
2756 } else
2757 gen_movl_TN_reg(rd, cpu_src1);
2760 #ifdef TARGET_SPARC64
2761 } else if (xop == 0x25) { /* sll, V9 sllx */
2762 cpu_src1 = get_src1(insn, cpu_src1);
2763 if (IS_IMM) { /* immediate */
2764 simm = GET_FIELDs(insn, 20, 31);
2765 if (insn & (1 << 12)) {
2766 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2767 } else {
2768 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2770 } else { /* register */
2771 rs2 = GET_FIELD(insn, 27, 31);
2772 gen_movl_reg_TN(rs2, cpu_src2);
2773 if (insn & (1 << 12)) {
2774 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2775 } else {
2776 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2778 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2780 gen_movl_TN_reg(rd, cpu_dst);
2781 } else if (xop == 0x26) { /* srl, V9 srlx */
2782 cpu_src1 = get_src1(insn, cpu_src1);
2783 if (IS_IMM) { /* immediate */
2784 simm = GET_FIELDs(insn, 20, 31);
2785 if (insn & (1 << 12)) {
2786 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2787 } else {
2788 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2789 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2791 } else { /* register */
2792 rs2 = GET_FIELD(insn, 27, 31);
2793 gen_movl_reg_TN(rs2, cpu_src2);
2794 if (insn & (1 << 12)) {
2795 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2796 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2797 } else {
2798 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2799 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2800 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2803 gen_movl_TN_reg(rd, cpu_dst);
2804 } else if (xop == 0x27) { /* sra, V9 srax */
2805 cpu_src1 = get_src1(insn, cpu_src1);
2806 if (IS_IMM) { /* immediate */
2807 simm = GET_FIELDs(insn, 20, 31);
2808 if (insn & (1 << 12)) {
2809 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2810 } else {
2811 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2812 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2813 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2815 } else { /* register */
2816 rs2 = GET_FIELD(insn, 27, 31);
2817 gen_movl_reg_TN(rs2, cpu_src2);
2818 if (insn & (1 << 12)) {
2819 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2820 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2821 } else {
2822 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2823 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2824 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2825 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2828 gen_movl_TN_reg(rd, cpu_dst);
2829 #endif
2830 } else if (xop < 0x36) {
2831 if (xop < 0x20) {
2832 cpu_src1 = get_src1(insn, cpu_src1);
2833 cpu_src2 = get_src2(insn, cpu_src2);
2834 switch (xop & ~0x10) {
2835 case 0x0: /* add */
2836 if (IS_IMM) {
2837 simm = GET_FIELDs(insn, 19, 31);
2838 if (xop & 0x10) {
2839 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2840 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2841 dc->cc_op = CC_OP_ADD;
2842 } else {
2843 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2845 } else {
2846 if (xop & 0x10) {
2847 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2848 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2849 dc->cc_op = CC_OP_ADD;
2850 } else {
2851 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2854 break;
2855 case 0x1: /* and */
2856 if (IS_IMM) {
2857 simm = GET_FIELDs(insn, 19, 31);
2858 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2859 } else {
2860 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2862 if (xop & 0x10) {
2863 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2864 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2865 dc->cc_op = CC_OP_LOGIC;
2867 break;
2868 case 0x2: /* or */
2869 if (IS_IMM) {
2870 simm = GET_FIELDs(insn, 19, 31);
2871 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2872 } else {
2873 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2875 if (xop & 0x10) {
2876 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2877 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2878 dc->cc_op = CC_OP_LOGIC;
2880 break;
2881 case 0x3: /* xor */
2882 if (IS_IMM) {
2883 simm = GET_FIELDs(insn, 19, 31);
2884 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2885 } else {
2886 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2888 if (xop & 0x10) {
2889 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2890 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2891 dc->cc_op = CC_OP_LOGIC;
2893 break;
2894 case 0x4: /* sub */
2895 if (IS_IMM) {
2896 simm = GET_FIELDs(insn, 19, 31);
2897 if (xop & 0x10) {
2898 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2899 } else {
2900 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2902 } else {
2903 if (xop & 0x10) {
2904 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2905 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2906 dc->cc_op = CC_OP_SUB;
2907 } else {
2908 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2911 break;
2912 case 0x5: /* andn */
2913 if (IS_IMM) {
2914 simm = GET_FIELDs(insn, 19, 31);
2915 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2916 } else {
2917 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2919 if (xop & 0x10) {
2920 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2921 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2922 dc->cc_op = CC_OP_LOGIC;
2924 break;
2925 case 0x6: /* orn */
2926 if (IS_IMM) {
2927 simm = GET_FIELDs(insn, 19, 31);
2928 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2929 } else {
2930 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2932 if (xop & 0x10) {
2933 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2934 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2935 dc->cc_op = CC_OP_LOGIC;
2937 break;
2938 case 0x7: /* xorn */
2939 if (IS_IMM) {
2940 simm = GET_FIELDs(insn, 19, 31);
2941 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2942 } else {
2943 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2944 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2946 if (xop & 0x10) {
2947 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2948 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2949 dc->cc_op = CC_OP_LOGIC;
2951 break;
2952 case 0x8: /* addx, V9 addc */
2953 if (IS_IMM) {
2954 simm = GET_FIELDs(insn, 19, 31);
2955 if (xop & 0x10) {
2956 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2957 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2958 dc->cc_op = CC_OP_ADDX;
2959 } else {
2960 gen_helper_compute_C_icc(cpu_tmp0);
2961 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2962 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2964 } else {
2965 if (xop & 0x10) {
2966 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2967 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2968 dc->cc_op = CC_OP_ADDX;
2969 } else {
2970 gen_helper_compute_C_icc(cpu_tmp0);
2971 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2972 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2975 break;
2976 #ifdef TARGET_SPARC64
2977 case 0x9: /* V9 mulx */
2978 if (IS_IMM) {
2979 simm = GET_FIELDs(insn, 19, 31);
2980 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2981 } else {
2982 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2984 break;
2985 #endif
2986 case 0xa: /* umul */
2987 CHECK_IU_FEATURE(dc, MUL);
2988 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2989 if (xop & 0x10) {
2990 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2991 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2992 dc->cc_op = CC_OP_LOGIC;
2994 break;
2995 case 0xb: /* smul */
2996 CHECK_IU_FEATURE(dc, MUL);
2997 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2998 if (xop & 0x10) {
2999 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3000 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3001 dc->cc_op = CC_OP_LOGIC;
3003 break;
3004 case 0xc: /* subx, V9 subc */
3005 if (IS_IMM) {
3006 simm = GET_FIELDs(insn, 19, 31);
3007 if (xop & 0x10) {
3008 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3009 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3010 dc->cc_op = CC_OP_SUBX;
3011 } else {
3012 gen_helper_compute_C_icc(cpu_tmp0);
3013 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3014 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3016 } else {
3017 if (xop & 0x10) {
3018 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3019 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3020 dc->cc_op = CC_OP_SUBX;
3021 } else {
3022 gen_helper_compute_C_icc(cpu_tmp0);
3023 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3024 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3027 break;
3028 #ifdef TARGET_SPARC64
3029 case 0xd: /* V9 udivx */
3030 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3031 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3032 gen_trap_ifdivzero_tl(cpu_cc_src2);
3033 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3034 break;
3035 #endif
3036 case 0xe: /* udiv */
3037 CHECK_IU_FEATURE(dc, DIV);
3038 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3039 if (xop & 0x10) {
3040 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3041 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3042 dc->cc_op = CC_OP_DIV;
3044 break;
3045 case 0xf: /* sdiv */
3046 CHECK_IU_FEATURE(dc, DIV);
3047 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3048 if (xop & 0x10) {
3049 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3050 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3051 dc->cc_op = CC_OP_DIV;
3053 break;
3054 default:
3055 goto illegal_insn;
3057 gen_movl_TN_reg(rd, cpu_dst);
3058 } else {
3059 cpu_src1 = get_src1(insn, cpu_src1);
3060 cpu_src2 = get_src2(insn, cpu_src2);
3061 switch (xop) {
3062 case 0x20: /* taddcc */
3063 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3064 gen_movl_TN_reg(rd, cpu_dst);
3065 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3066 dc->cc_op = CC_OP_TADD;
3067 break;
3068 case 0x21: /* tsubcc */
3069 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3070 gen_movl_TN_reg(rd, cpu_dst);
3071 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3072 dc->cc_op = CC_OP_TSUB;
3073 break;
3074 case 0x22: /* taddcctv */
3075 save_state(dc, cpu_cond);
3076 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3077 gen_movl_TN_reg(rd, cpu_dst);
3078 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3079 dc->cc_op = CC_OP_TADDTV;
3080 break;
3081 case 0x23: /* tsubcctv */
3082 save_state(dc, cpu_cond);
3083 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3084 gen_movl_TN_reg(rd, cpu_dst);
3085 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3086 dc->cc_op = CC_OP_TSUBTV;
3087 break;
3088 case 0x24: /* mulscc */
3089 gen_helper_compute_psr();
3090 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3091 gen_movl_TN_reg(rd, cpu_dst);
3092 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3093 dc->cc_op = CC_OP_ADD;
3094 break;
3095 #ifndef TARGET_SPARC64
3096 case 0x25: /* sll */
3097 if (IS_IMM) { /* immediate */
3098 simm = GET_FIELDs(insn, 20, 31);
3099 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3100 } else { /* register */
3101 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3102 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3104 gen_movl_TN_reg(rd, cpu_dst);
3105 break;
3106 case 0x26: /* srl */
3107 if (IS_IMM) { /* immediate */
3108 simm = GET_FIELDs(insn, 20, 31);
3109 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3110 } else { /* register */
3111 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3112 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3114 gen_movl_TN_reg(rd, cpu_dst);
3115 break;
3116 case 0x27: /* sra */
3117 if (IS_IMM) { /* immediate */
3118 simm = GET_FIELDs(insn, 20, 31);
3119 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3120 } else { /* register */
3121 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3122 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3124 gen_movl_TN_reg(rd, cpu_dst);
3125 break;
3126 #endif
3127 case 0x30:
3129 switch(rd) {
3130 case 0: /* wry */
3131 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3132 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3133 break;
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3136 SPARCv8 manual, nop
3137 on the microSPARC
3138 II */
3139 case 0x10 ... 0x1f: /* implementation-dependent
3140 in the SPARCv8
3141 manual, nop on the
3142 microSPARC II */
3143 break;
3144 #else
3145 case 0x2: /* V9 wrccr */
3146 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3147 gen_helper_wrccr(cpu_dst);
3148 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3149 dc->cc_op = CC_OP_FLAGS;
3150 break;
3151 case 0x3: /* V9 wrasi */
3152 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3153 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3154 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3155 break;
3156 case 0x6: /* V9 wrfprs */
3157 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3158 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3159 save_state(dc, cpu_cond);
3160 gen_op_next_insn();
3161 tcg_gen_exit_tb(0);
3162 dc->is_br = 1;
3163 break;
3164 case 0xf: /* V9 sir, nop if user */
3165 #if !defined(CONFIG_USER_ONLY)
3166 if (supervisor(dc)) {
3167 ; // XXX
3169 #endif
3170 break;
3171 case 0x13: /* Graphics Status */
3172 if (gen_trap_ifnofpu(dc, cpu_cond))
3173 goto jmp_insn;
3174 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3175 break;
3176 case 0x14: /* Softint set */
3177 if (!supervisor(dc))
3178 goto illegal_insn;
3179 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3180 gen_helper_set_softint(cpu_tmp64);
3181 break;
3182 case 0x15: /* Softint clear */
3183 if (!supervisor(dc))
3184 goto illegal_insn;
3185 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3186 gen_helper_clear_softint(cpu_tmp64);
3187 break;
3188 case 0x16: /* Softint write */
3189 if (!supervisor(dc))
3190 goto illegal_insn;
3191 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3192 gen_helper_write_softint(cpu_tmp64);
3193 break;
3194 case 0x17: /* Tick compare */
3195 #if !defined(CONFIG_USER_ONLY)
3196 if (!supervisor(dc))
3197 goto illegal_insn;
3198 #endif
3200 TCGv_ptr r_tickptr;
3202 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3203 cpu_src2);
3204 r_tickptr = tcg_temp_new_ptr();
3205 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3206 offsetof(CPUState, tick));
3207 gen_helper_tick_set_limit(r_tickptr,
3208 cpu_tick_cmpr);
3209 tcg_temp_free_ptr(r_tickptr);
3211 break;
3212 case 0x18: /* System tick */
3213 #if !defined(CONFIG_USER_ONLY)
3214 if (!supervisor(dc))
3215 goto illegal_insn;
3216 #endif
3218 TCGv_ptr r_tickptr;
3220 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3221 cpu_src2);
3222 r_tickptr = tcg_temp_new_ptr();
3223 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3224 offsetof(CPUState, stick));
3225 gen_helper_tick_set_count(r_tickptr,
3226 cpu_dst);
3227 tcg_temp_free_ptr(r_tickptr);
3229 break;
3230 case 0x19: /* System tick compare */
3231 #if !defined(CONFIG_USER_ONLY)
3232 if (!supervisor(dc))
3233 goto illegal_insn;
3234 #endif
3236 TCGv_ptr r_tickptr;
3238 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3239 cpu_src2);
3240 r_tickptr = tcg_temp_new_ptr();
3241 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3242 offsetof(CPUState, stick));
3243 gen_helper_tick_set_limit(r_tickptr,
3244 cpu_stick_cmpr);
3245 tcg_temp_free_ptr(r_tickptr);
3247 break;
3249 case 0x10: /* Performance Control */
3250 case 0x11: /* Performance Instrumentation
3251 Counter */
3252 case 0x12: /* Dispatch Control */
3253 #endif
3254 default:
3255 goto illegal_insn;
3258 break;
3259 #if !defined(CONFIG_USER_ONLY)
3260 case 0x31: /* wrpsr, V9 saved, restored */
3262 if (!supervisor(dc))
3263 goto priv_insn;
3264 #ifdef TARGET_SPARC64
3265 switch (rd) {
3266 case 0:
3267 gen_helper_saved();
3268 break;
3269 case 1:
3270 gen_helper_restored();
3271 break;
3272 case 2: /* UA2005 allclean */
3273 case 3: /* UA2005 otherw */
3274 case 4: /* UA2005 normalw */
3275 case 5: /* UA2005 invalw */
3276 // XXX
3277 default:
3278 goto illegal_insn;
3280 #else
3281 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3282 gen_helper_wrpsr(cpu_dst);
3283 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3284 dc->cc_op = CC_OP_FLAGS;
3285 save_state(dc, cpu_cond);
3286 gen_op_next_insn();
3287 tcg_gen_exit_tb(0);
3288 dc->is_br = 1;
3289 #endif
3291 break;
3292 case 0x32: /* wrwim, V9 wrpr */
3294 if (!supervisor(dc))
3295 goto priv_insn;
3296 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3297 #ifdef TARGET_SPARC64
3298 switch (rd) {
3299 case 0: // tpc
3301 TCGv_ptr r_tsptr;
3303 r_tsptr = tcg_temp_new_ptr();
3304 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3305 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3306 offsetof(trap_state, tpc));
3307 tcg_temp_free_ptr(r_tsptr);
3309 break;
3310 case 1: // tnpc
3312 TCGv_ptr r_tsptr;
3314 r_tsptr = tcg_temp_new_ptr();
3315 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3316 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3317 offsetof(trap_state, tnpc));
3318 tcg_temp_free_ptr(r_tsptr);
3320 break;
3321 case 2: // tstate
3323 TCGv_ptr r_tsptr;
3325 r_tsptr = tcg_temp_new_ptr();
3326 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3327 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3328 offsetof(trap_state,
3329 tstate));
3330 tcg_temp_free_ptr(r_tsptr);
3332 break;
3333 case 3: // tt
3335 TCGv_ptr r_tsptr;
3337 r_tsptr = tcg_temp_new_ptr();
3338 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3339 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3340 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3341 offsetof(trap_state, tt));
3342 tcg_temp_free_ptr(r_tsptr);
3344 break;
3345 case 4: // tick
3347 TCGv_ptr r_tickptr;
3349 r_tickptr = tcg_temp_new_ptr();
3350 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3351 offsetof(CPUState, tick));
3352 gen_helper_tick_set_count(r_tickptr,
3353 cpu_tmp0);
3354 tcg_temp_free_ptr(r_tickptr);
3356 break;
3357 case 5: // tba
3358 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3359 break;
3360 case 6: // pstate
3361 save_state(dc, cpu_cond);
3362 gen_helper_wrpstate(cpu_tmp0);
3363 gen_op_next_insn();
3364 tcg_gen_exit_tb(0);
3365 dc->is_br = 1;
3366 break;
3367 case 7: // tl
3368 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3369 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3370 offsetof(CPUSPARCState, tl));
3371 break;
3372 case 8: // pil
3373 gen_helper_wrpil(cpu_tmp0);
3374 break;
3375 case 9: // cwp
3376 gen_helper_wrcwp(cpu_tmp0);
3377 break;
3378 case 10: // cansave
3379 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3380 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3381 offsetof(CPUSPARCState,
3382 cansave));
3383 break;
3384 case 11: // canrestore
3385 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3386 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3387 offsetof(CPUSPARCState,
3388 canrestore));
3389 break;
3390 case 12: // cleanwin
3391 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3392 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3393 offsetof(CPUSPARCState,
3394 cleanwin));
3395 break;
3396 case 13: // otherwin
3397 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3398 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3399 offsetof(CPUSPARCState,
3400 otherwin));
3401 break;
3402 case 14: // wstate
3403 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3404 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3405 offsetof(CPUSPARCState,
3406 wstate));
3407 break;
3408 case 16: // UA2005 gl
3409 CHECK_IU_FEATURE(dc, GL);
3410 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3411 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3412 offsetof(CPUSPARCState, gl));
3413 break;
3414 case 26: // UA2005 strand status
3415 CHECK_IU_FEATURE(dc, HYPV);
3416 if (!hypervisor(dc))
3417 goto priv_insn;
3418 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3419 break;
3420 default:
3421 goto illegal_insn;
3423 #else
3424 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3425 if (dc->def->nwindows != 32)
3426 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3427 (1 << dc->def->nwindows) - 1);
3428 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3429 #endif
3431 break;
3432 case 0x33: /* wrtbr, UA2005 wrhpr */
3434 #ifndef TARGET_SPARC64
3435 if (!supervisor(dc))
3436 goto priv_insn;
3437 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3438 #else
3439 CHECK_IU_FEATURE(dc, HYPV);
3440 if (!hypervisor(dc))
3441 goto priv_insn;
3442 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3443 switch (rd) {
3444 case 0: // hpstate
3445 // XXX gen_op_wrhpstate();
3446 save_state(dc, cpu_cond);
3447 gen_op_next_insn();
3448 tcg_gen_exit_tb(0);
3449 dc->is_br = 1;
3450 break;
3451 case 1: // htstate
3452 // XXX gen_op_wrhtstate();
3453 break;
3454 case 3: // hintp
3455 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3456 break;
3457 case 5: // htba
3458 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3459 break;
3460 case 31: // hstick_cmpr
3462 TCGv_ptr r_tickptr;
3464 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3465 r_tickptr = tcg_temp_new_ptr();
3466 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3467 offsetof(CPUState, hstick));
3468 gen_helper_tick_set_limit(r_tickptr,
3469 cpu_hstick_cmpr);
3470 tcg_temp_free_ptr(r_tickptr);
3472 break;
3473 case 6: // hver readonly
3474 default:
3475 goto illegal_insn;
3477 #endif
3479 break;
3480 #endif
3481 #ifdef TARGET_SPARC64
3482 case 0x2c: /* V9 movcc */
3484 int cc = GET_FIELD_SP(insn, 11, 12);
3485 int cond = GET_FIELD_SP(insn, 14, 17);
3486 TCGv r_cond;
3487 int l1;
3489 r_cond = tcg_temp_new();
3490 if (insn & (1 << 18)) {
3491 if (cc == 0)
3492 gen_cond(r_cond, 0, cond, dc);
3493 else if (cc == 2)
3494 gen_cond(r_cond, 1, cond, dc);
3495 else
3496 goto illegal_insn;
3497 } else {
3498 gen_fcond(r_cond, cc, cond);
3501 l1 = gen_new_label();
3503 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3504 if (IS_IMM) { /* immediate */
3505 TCGv r_const;
3507 simm = GET_FIELD_SPs(insn, 0, 10);
3508 r_const = tcg_const_tl(simm);
3509 gen_movl_TN_reg(rd, r_const);
3510 tcg_temp_free(r_const);
3511 } else {
3512 rs2 = GET_FIELD_SP(insn, 0, 4);
3513 gen_movl_reg_TN(rs2, cpu_tmp0);
3514 gen_movl_TN_reg(rd, cpu_tmp0);
3516 gen_set_label(l1);
3517 tcg_temp_free(r_cond);
3518 break;
3520 case 0x2d: /* V9 sdivx */
3521 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3522 gen_movl_TN_reg(rd, cpu_dst);
3523 break;
3524 case 0x2e: /* V9 popc */
3526 cpu_src2 = get_src2(insn, cpu_src2);
3527 gen_helper_popc(cpu_dst, cpu_src2);
3528 gen_movl_TN_reg(rd, cpu_dst);
3530 case 0x2f: /* V9 movr */
3532 int cond = GET_FIELD_SP(insn, 10, 12);
3533 int l1;
3535 cpu_src1 = get_src1(insn, cpu_src1);
3537 l1 = gen_new_label();
3539 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3540 cpu_src1, 0, l1);
3541 if (IS_IMM) { /* immediate */
3542 TCGv r_const;
3544 simm = GET_FIELD_SPs(insn, 0, 9);
3545 r_const = tcg_const_tl(simm);
3546 gen_movl_TN_reg(rd, r_const);
3547 tcg_temp_free(r_const);
3548 } else {
3549 rs2 = GET_FIELD_SP(insn, 0, 4);
3550 gen_movl_reg_TN(rs2, cpu_tmp0);
3551 gen_movl_TN_reg(rd, cpu_tmp0);
3553 gen_set_label(l1);
3554 break;
3556 #endif
3557 default:
3558 goto illegal_insn;
3561 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3562 #ifdef TARGET_SPARC64
3563 int opf = GET_FIELD_SP(insn, 5, 13);
3564 rs1 = GET_FIELD(insn, 13, 17);
3565 rs2 = GET_FIELD(insn, 27, 31);
3566 if (gen_trap_ifnofpu(dc, cpu_cond))
3567 goto jmp_insn;
3569 switch (opf) {
3570 case 0x000: /* VIS I edge8cc */
3571 case 0x001: /* VIS II edge8n */
3572 case 0x002: /* VIS I edge8lcc */
3573 case 0x003: /* VIS II edge8ln */
3574 case 0x004: /* VIS I edge16cc */
3575 case 0x005: /* VIS II edge16n */
3576 case 0x006: /* VIS I edge16lcc */
3577 case 0x007: /* VIS II edge16ln */
3578 case 0x008: /* VIS I edge32cc */
3579 case 0x009: /* VIS II edge32n */
3580 case 0x00a: /* VIS I edge32lcc */
3581 case 0x00b: /* VIS II edge32ln */
3582 // XXX
3583 goto illegal_insn;
3584 case 0x010: /* VIS I array8 */
3585 CHECK_FPU_FEATURE(dc, VIS1);
3586 cpu_src1 = get_src1(insn, cpu_src1);
3587 gen_movl_reg_TN(rs2, cpu_src2);
3588 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3589 gen_movl_TN_reg(rd, cpu_dst);
3590 break;
3591 case 0x012: /* VIS I array16 */
3592 CHECK_FPU_FEATURE(dc, VIS1);
3593 cpu_src1 = get_src1(insn, cpu_src1);
3594 gen_movl_reg_TN(rs2, cpu_src2);
3595 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3596 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3597 gen_movl_TN_reg(rd, cpu_dst);
3598 break;
3599 case 0x014: /* VIS I array32 */
3600 CHECK_FPU_FEATURE(dc, VIS1);
3601 cpu_src1 = get_src1(insn, cpu_src1);
3602 gen_movl_reg_TN(rs2, cpu_src2);
3603 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3604 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3605 gen_movl_TN_reg(rd, cpu_dst);
3606 break;
3607 case 0x018: /* VIS I alignaddr */
3608 CHECK_FPU_FEATURE(dc, VIS1);
3609 cpu_src1 = get_src1(insn, cpu_src1);
3610 gen_movl_reg_TN(rs2, cpu_src2);
3611 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3612 gen_movl_TN_reg(rd, cpu_dst);
3613 break;
3614 case 0x019: /* VIS II bmask */
3615 case 0x01a: /* VIS I alignaddrl */
3616 // XXX
3617 goto illegal_insn;
3618 case 0x020: /* VIS I fcmple16 */
3619 CHECK_FPU_FEATURE(dc, VIS1);
3620 gen_op_load_fpr_DT0(DFPREG(rs1));
3621 gen_op_load_fpr_DT1(DFPREG(rs2));
3622 gen_helper_fcmple16();
3623 gen_op_store_DT0_fpr(DFPREG(rd));
3624 break;
3625 case 0x022: /* VIS I fcmpne16 */
3626 CHECK_FPU_FEATURE(dc, VIS1);
3627 gen_op_load_fpr_DT0(DFPREG(rs1));
3628 gen_op_load_fpr_DT1(DFPREG(rs2));
3629 gen_helper_fcmpne16();
3630 gen_op_store_DT0_fpr(DFPREG(rd));
3631 break;
3632 case 0x024: /* VIS I fcmple32 */
3633 CHECK_FPU_FEATURE(dc, VIS1);
3634 gen_op_load_fpr_DT0(DFPREG(rs1));
3635 gen_op_load_fpr_DT1(DFPREG(rs2));
3636 gen_helper_fcmple32();
3637 gen_op_store_DT0_fpr(DFPREG(rd));
3638 break;
3639 case 0x026: /* VIS I fcmpne32 */
3640 CHECK_FPU_FEATURE(dc, VIS1);
3641 gen_op_load_fpr_DT0(DFPREG(rs1));
3642 gen_op_load_fpr_DT1(DFPREG(rs2));
3643 gen_helper_fcmpne32();
3644 gen_op_store_DT0_fpr(DFPREG(rd));
3645 break;
3646 case 0x028: /* VIS I fcmpgt16 */
3647 CHECK_FPU_FEATURE(dc, VIS1);
3648 gen_op_load_fpr_DT0(DFPREG(rs1));
3649 gen_op_load_fpr_DT1(DFPREG(rs2));
3650 gen_helper_fcmpgt16();
3651 gen_op_store_DT0_fpr(DFPREG(rd));
3652 break;
3653 case 0x02a: /* VIS I fcmpeq16 */
3654 CHECK_FPU_FEATURE(dc, VIS1);
3655 gen_op_load_fpr_DT0(DFPREG(rs1));
3656 gen_op_load_fpr_DT1(DFPREG(rs2));
3657 gen_helper_fcmpeq16();
3658 gen_op_store_DT0_fpr(DFPREG(rd));
3659 break;
3660 case 0x02c: /* VIS I fcmpgt32 */
3661 CHECK_FPU_FEATURE(dc, VIS1);
3662 gen_op_load_fpr_DT0(DFPREG(rs1));
3663 gen_op_load_fpr_DT1(DFPREG(rs2));
3664 gen_helper_fcmpgt32();
3665 gen_op_store_DT0_fpr(DFPREG(rd));
3666 break;
3667 case 0x02e: /* VIS I fcmpeq32 */
3668 CHECK_FPU_FEATURE(dc, VIS1);
3669 gen_op_load_fpr_DT0(DFPREG(rs1));
3670 gen_op_load_fpr_DT1(DFPREG(rs2));
3671 gen_helper_fcmpeq32();
3672 gen_op_store_DT0_fpr(DFPREG(rd));
3673 break;
3674 case 0x031: /* VIS I fmul8x16 */
3675 CHECK_FPU_FEATURE(dc, VIS1);
3676 gen_op_load_fpr_DT0(DFPREG(rs1));
3677 gen_op_load_fpr_DT1(DFPREG(rs2));
3678 gen_helper_fmul8x16();
3679 gen_op_store_DT0_fpr(DFPREG(rd));
3680 break;
3681 case 0x033: /* VIS I fmul8x16au */
3682 CHECK_FPU_FEATURE(dc, VIS1);
3683 gen_op_load_fpr_DT0(DFPREG(rs1));
3684 gen_op_load_fpr_DT1(DFPREG(rs2));
3685 gen_helper_fmul8x16au();
3686 gen_op_store_DT0_fpr(DFPREG(rd));
3687 break;
3688 case 0x035: /* VIS I fmul8x16al */
3689 CHECK_FPU_FEATURE(dc, VIS1);
3690 gen_op_load_fpr_DT0(DFPREG(rs1));
3691 gen_op_load_fpr_DT1(DFPREG(rs2));
3692 gen_helper_fmul8x16al();
3693 gen_op_store_DT0_fpr(DFPREG(rd));
3694 break;
3695 case 0x036: /* VIS I fmul8sux16 */
3696 CHECK_FPU_FEATURE(dc, VIS1);
3697 gen_op_load_fpr_DT0(DFPREG(rs1));
3698 gen_op_load_fpr_DT1(DFPREG(rs2));
3699 gen_helper_fmul8sux16();
3700 gen_op_store_DT0_fpr(DFPREG(rd));
3701 break;
3702 case 0x037: /* VIS I fmul8ulx16 */
3703 CHECK_FPU_FEATURE(dc, VIS1);
3704 gen_op_load_fpr_DT0(DFPREG(rs1));
3705 gen_op_load_fpr_DT1(DFPREG(rs2));
3706 gen_helper_fmul8ulx16();
3707 gen_op_store_DT0_fpr(DFPREG(rd));
3708 break;
3709 case 0x038: /* VIS I fmuld8sux16 */
3710 CHECK_FPU_FEATURE(dc, VIS1);
3711 gen_op_load_fpr_DT0(DFPREG(rs1));
3712 gen_op_load_fpr_DT1(DFPREG(rs2));
3713 gen_helper_fmuld8sux16();
3714 gen_op_store_DT0_fpr(DFPREG(rd));
3715 break;
3716 case 0x039: /* VIS I fmuld8ulx16 */
3717 CHECK_FPU_FEATURE(dc, VIS1);
3718 gen_op_load_fpr_DT0(DFPREG(rs1));
3719 gen_op_load_fpr_DT1(DFPREG(rs2));
3720 gen_helper_fmuld8ulx16();
3721 gen_op_store_DT0_fpr(DFPREG(rd));
3722 break;
3723 case 0x03a: /* VIS I fpack32 */
3724 case 0x03b: /* VIS I fpack16 */
3725 case 0x03d: /* VIS I fpackfix */
3726 case 0x03e: /* VIS I pdist */
3727 // XXX
3728 goto illegal_insn;
3729 case 0x048: /* VIS I faligndata */
3730 CHECK_FPU_FEATURE(dc, VIS1);
3731 gen_op_load_fpr_DT0(DFPREG(rs1));
3732 gen_op_load_fpr_DT1(DFPREG(rs2));
3733 gen_helper_faligndata();
3734 gen_op_store_DT0_fpr(DFPREG(rd));
3735 break;
3736 case 0x04b: /* VIS I fpmerge */
3737 CHECK_FPU_FEATURE(dc, VIS1);
3738 gen_op_load_fpr_DT0(DFPREG(rs1));
3739 gen_op_load_fpr_DT1(DFPREG(rs2));
3740 gen_helper_fpmerge();
3741 gen_op_store_DT0_fpr(DFPREG(rd));
3742 break;
3743 case 0x04c: /* VIS II bshuffle */
3744 // XXX
3745 goto illegal_insn;
3746 case 0x04d: /* VIS I fexpand */
3747 CHECK_FPU_FEATURE(dc, VIS1);
3748 gen_op_load_fpr_DT0(DFPREG(rs1));
3749 gen_op_load_fpr_DT1(DFPREG(rs2));
3750 gen_helper_fexpand();
3751 gen_op_store_DT0_fpr(DFPREG(rd));
3752 break;
3753 case 0x050: /* VIS I fpadd16 */
3754 CHECK_FPU_FEATURE(dc, VIS1);
3755 gen_op_load_fpr_DT0(DFPREG(rs1));
3756 gen_op_load_fpr_DT1(DFPREG(rs2));
3757 gen_helper_fpadd16();
3758 gen_op_store_DT0_fpr(DFPREG(rd));
3759 break;
3760 case 0x051: /* VIS I fpadd16s */
3761 CHECK_FPU_FEATURE(dc, VIS1);
3762 gen_helper_fpadd16s(cpu_fpr[rd],
3763 cpu_fpr[rs1], cpu_fpr[rs2]);
3764 break;
3765 case 0x052: /* VIS I fpadd32 */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_op_load_fpr_DT0(DFPREG(rs1));
3768 gen_op_load_fpr_DT1(DFPREG(rs2));
3769 gen_helper_fpadd32();
3770 gen_op_store_DT0_fpr(DFPREG(rd));
3771 break;
3772 case 0x053: /* VIS I fpadd32s */
3773 CHECK_FPU_FEATURE(dc, VIS1);
3774 gen_helper_fpadd32s(cpu_fpr[rd],
3775 cpu_fpr[rs1], cpu_fpr[rs2]);
3776 break;
3777 case 0x054: /* VIS I fpsub16 */
3778 CHECK_FPU_FEATURE(dc, VIS1);
3779 gen_op_load_fpr_DT0(DFPREG(rs1));
3780 gen_op_load_fpr_DT1(DFPREG(rs2));
3781 gen_helper_fpsub16();
3782 gen_op_store_DT0_fpr(DFPREG(rd));
3783 break;
3784 case 0x055: /* VIS I fpsub16s */
3785 CHECK_FPU_FEATURE(dc, VIS1);
3786 gen_helper_fpsub16s(cpu_fpr[rd],
3787 cpu_fpr[rs1], cpu_fpr[rs2]);
3788 break;
3789 case 0x056: /* VIS I fpsub32 */
3790 CHECK_FPU_FEATURE(dc, VIS1);
3791 gen_op_load_fpr_DT0(DFPREG(rs1));
3792 gen_op_load_fpr_DT1(DFPREG(rs2));
3793 gen_helper_fpsub32();
3794 gen_op_store_DT0_fpr(DFPREG(rd));
3795 break;
3796 case 0x057: /* VIS I fpsub32s */
3797 CHECK_FPU_FEATURE(dc, VIS1);
3798 gen_helper_fpsub32s(cpu_fpr[rd],
3799 cpu_fpr[rs1], cpu_fpr[rs2]);
3800 break;
3801 case 0x060: /* VIS I fzero */
3802 CHECK_FPU_FEATURE(dc, VIS1);
3803 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3804 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3805 break;
3806 case 0x061: /* VIS I fzeros */
3807 CHECK_FPU_FEATURE(dc, VIS1);
3808 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3809 break;
3810 case 0x062: /* VIS I fnor */
3811 CHECK_FPU_FEATURE(dc, VIS1);
3812 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3813 cpu_fpr[DFPREG(rs2)]);
3814 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3815 cpu_fpr[DFPREG(rs2) + 1]);
3816 break;
3817 case 0x063: /* VIS I fnors */
3818 CHECK_FPU_FEATURE(dc, VIS1);
3819 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3820 break;
3821 case 0x064: /* VIS I fandnot2 */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3824 cpu_fpr[DFPREG(rs2)]);
3825 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3826 cpu_fpr[DFPREG(rs1) + 1],
3827 cpu_fpr[DFPREG(rs2) + 1]);
3828 break;
3829 case 0x065: /* VIS I fandnot2s */
3830 CHECK_FPU_FEATURE(dc, VIS1);
3831 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3832 break;
3833 case 0x066: /* VIS I fnot2 */
3834 CHECK_FPU_FEATURE(dc, VIS1);
3835 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3836 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3837 cpu_fpr[DFPREG(rs2) + 1]);
3838 break;
3839 case 0x067: /* VIS I fnot2s */
3840 CHECK_FPU_FEATURE(dc, VIS1);
3841 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3842 break;
3843 case 0x068: /* VIS I fandnot1 */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3846 cpu_fpr[DFPREG(rs1)]);
3847 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3848 cpu_fpr[DFPREG(rs2) + 1],
3849 cpu_fpr[DFPREG(rs1) + 1]);
3850 break;
3851 case 0x069: /* VIS I fandnot1s */
3852 CHECK_FPU_FEATURE(dc, VIS1);
3853 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3854 break;
3855 case 0x06a: /* VIS I fnot1 */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3858 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3859 cpu_fpr[DFPREG(rs1) + 1]);
3860 break;
3861 case 0x06b: /* VIS I fnot1s */
3862 CHECK_FPU_FEATURE(dc, VIS1);
3863 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3864 break;
3865 case 0x06c: /* VIS I fxor */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3868 cpu_fpr[DFPREG(rs2)]);
3869 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3870 cpu_fpr[DFPREG(rs1) + 1],
3871 cpu_fpr[DFPREG(rs2) + 1]);
3872 break;
3873 case 0x06d: /* VIS I fxors */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3876 break;
3877 case 0x06e: /* VIS I fnand */
3878 CHECK_FPU_FEATURE(dc, VIS1);
3879 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3880 cpu_fpr[DFPREG(rs2)]);
3881 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3882 cpu_fpr[DFPREG(rs2) + 1]);
3883 break;
3884 case 0x06f: /* VIS I fnands */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3887 break;
3888 case 0x070: /* VIS I fand */
3889 CHECK_FPU_FEATURE(dc, VIS1);
3890 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3891 cpu_fpr[DFPREG(rs2)]);
3892 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3893 cpu_fpr[DFPREG(rs1) + 1],
3894 cpu_fpr[DFPREG(rs2) + 1]);
3895 break;
3896 case 0x071: /* VIS I fands */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3899 break;
3900 case 0x072: /* VIS I fxnor */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3903 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3904 cpu_fpr[DFPREG(rs1)]);
3905 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3906 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3907 cpu_fpr[DFPREG(rs1) + 1]);
3908 break;
3909 case 0x073: /* VIS I fxnors */
3910 CHECK_FPU_FEATURE(dc, VIS1);
3911 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3912 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3913 break;
3914 case 0x074: /* VIS I fsrc1 */
3915 CHECK_FPU_FEATURE(dc, VIS1);
3916 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3917 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3918 cpu_fpr[DFPREG(rs1) + 1]);
3919 break;
3920 case 0x075: /* VIS I fsrc1s */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3923 break;
3924 case 0x076: /* VIS I fornot2 */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3927 cpu_fpr[DFPREG(rs2)]);
3928 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3929 cpu_fpr[DFPREG(rs1) + 1],
3930 cpu_fpr[DFPREG(rs2) + 1]);
3931 break;
3932 case 0x077: /* VIS I fornot2s */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3935 break;
3936 case 0x078: /* VIS I fsrc2 */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 gen_op_load_fpr_DT0(DFPREG(rs2));
3939 gen_op_store_DT0_fpr(DFPREG(rd));
3940 break;
3941 case 0x079: /* VIS I fsrc2s */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3944 break;
3945 case 0x07a: /* VIS I fornot1 */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3948 cpu_fpr[DFPREG(rs1)]);
3949 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3950 cpu_fpr[DFPREG(rs2) + 1],
3951 cpu_fpr[DFPREG(rs1) + 1]);
3952 break;
3953 case 0x07b: /* VIS I fornot1s */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3956 break;
3957 case 0x07c: /* VIS I for */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3960 cpu_fpr[DFPREG(rs2)]);
3961 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3962 cpu_fpr[DFPREG(rs1) + 1],
3963 cpu_fpr[DFPREG(rs2) + 1]);
3964 break;
3965 case 0x07d: /* VIS I fors */
3966 CHECK_FPU_FEATURE(dc, VIS1);
3967 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3968 break;
3969 case 0x07e: /* VIS I fone */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3972 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3973 break;
3974 case 0x07f: /* VIS I fones */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3977 break;
3978 case 0x080: /* VIS I shutdown */
3979 case 0x081: /* VIS II siam */
3980 // XXX
3981 goto illegal_insn;
3982 default:
3983 goto illegal_insn;
3985 #else
3986 goto ncp_insn;
3987 #endif
3988 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3989 #ifdef TARGET_SPARC64
3990 goto illegal_insn;
3991 #else
3992 goto ncp_insn;
3993 #endif
3994 #ifdef TARGET_SPARC64
3995 } else if (xop == 0x39) { /* V9 return */
3996 TCGv_i32 r_const;
3998 save_state(dc, cpu_cond);
3999 cpu_src1 = get_src1(insn, cpu_src1);
4000 if (IS_IMM) { /* immediate */
4001 simm = GET_FIELDs(insn, 19, 31);
4002 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4003 } else { /* register */
4004 rs2 = GET_FIELD(insn, 27, 31);
4005 if (rs2) {
4006 gen_movl_reg_TN(rs2, cpu_src2);
4007 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4008 } else
4009 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4011 gen_helper_restore();
4012 gen_mov_pc_npc(dc, cpu_cond);
4013 r_const = tcg_const_i32(3);
4014 gen_helper_check_align(cpu_dst, r_const);
4015 tcg_temp_free_i32(r_const);
4016 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4017 dc->npc = DYNAMIC_PC;
4018 goto jmp_insn;
4019 #endif
4020 } else {
4021 cpu_src1 = get_src1(insn, cpu_src1);
4022 if (IS_IMM) { /* immediate */
4023 simm = GET_FIELDs(insn, 19, 31);
4024 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4025 } else { /* register */
4026 rs2 = GET_FIELD(insn, 27, 31);
4027 if (rs2) {
4028 gen_movl_reg_TN(rs2, cpu_src2);
4029 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4030 } else
4031 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4033 switch (xop) {
4034 case 0x38: /* jmpl */
4036 TCGv r_pc;
4037 TCGv_i32 r_const;
4039 r_pc = tcg_const_tl(dc->pc);
4040 gen_movl_TN_reg(rd, r_pc);
4041 tcg_temp_free(r_pc);
4042 gen_mov_pc_npc(dc, cpu_cond);
4043 r_const = tcg_const_i32(3);
4044 gen_helper_check_align(cpu_dst, r_const);
4045 tcg_temp_free_i32(r_const);
4046 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4047 dc->npc = DYNAMIC_PC;
4049 goto jmp_insn;
4050 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4051 case 0x39: /* rett, V9 return */
4053 TCGv_i32 r_const;
4055 if (!supervisor(dc))
4056 goto priv_insn;
4057 gen_mov_pc_npc(dc, cpu_cond);
4058 r_const = tcg_const_i32(3);
4059 gen_helper_check_align(cpu_dst, r_const);
4060 tcg_temp_free_i32(r_const);
4061 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4062 dc->npc = DYNAMIC_PC;
4063 gen_helper_rett();
4065 goto jmp_insn;
4066 #endif
4067 case 0x3b: /* flush */
4068 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4069 goto unimp_flush;
4070 gen_helper_flush(cpu_dst);
4071 break;
4072 case 0x3c: /* save */
4073 save_state(dc, cpu_cond);
4074 gen_helper_save();
4075 gen_movl_TN_reg(rd, cpu_dst);
4076 break;
4077 case 0x3d: /* restore */
4078 save_state(dc, cpu_cond);
4079 gen_helper_restore();
4080 gen_movl_TN_reg(rd, cpu_dst);
4081 break;
4082 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4083 case 0x3e: /* V9 done/retry */
4085 switch (rd) {
4086 case 0:
4087 if (!supervisor(dc))
4088 goto priv_insn;
4089 dc->npc = DYNAMIC_PC;
4090 dc->pc = DYNAMIC_PC;
4091 gen_helper_done();
4092 goto jmp_insn;
4093 case 1:
4094 if (!supervisor(dc))
4095 goto priv_insn;
4096 dc->npc = DYNAMIC_PC;
4097 dc->pc = DYNAMIC_PC;
4098 gen_helper_retry();
4099 goto jmp_insn;
4100 default:
4101 goto illegal_insn;
4104 break;
4105 #endif
4106 default:
4107 goto illegal_insn;
4110 break;
4112 break;
4113 case 3: /* load/store instructions */
4115 unsigned int xop = GET_FIELD(insn, 7, 12);
4117 /* flush pending conditional evaluations before exposing
4118 cpu state */
4119 if (dc->cc_op != CC_OP_FLAGS) {
4120 dc->cc_op = CC_OP_FLAGS;
4121 gen_helper_compute_psr();
4123 cpu_src1 = get_src1(insn, cpu_src1);
4124 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4125 rs2 = GET_FIELD(insn, 27, 31);
4126 gen_movl_reg_TN(rs2, cpu_src2);
4127 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4128 } else if (IS_IMM) { /* immediate */
4129 simm = GET_FIELDs(insn, 19, 31);
4130 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4131 } else { /* register */
4132 rs2 = GET_FIELD(insn, 27, 31);
4133 if (rs2 != 0) {
4134 gen_movl_reg_TN(rs2, cpu_src2);
4135 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4136 } else
4137 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4139 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4140 (xop > 0x17 && xop <= 0x1d ) ||
4141 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4142 switch (xop) {
4143 case 0x0: /* ld, V9 lduw, load unsigned word */
4144 gen_address_mask(dc, cpu_addr);
4145 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4146 break;
4147 case 0x1: /* ldub, load unsigned byte */
4148 gen_address_mask(dc, cpu_addr);
4149 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4150 break;
4151 case 0x2: /* lduh, load unsigned halfword */
4152 gen_address_mask(dc, cpu_addr);
4153 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4154 break;
4155 case 0x3: /* ldd, load double word */
4156 if (rd & 1)
4157 goto illegal_insn;
4158 else {
4159 TCGv_i32 r_const;
4161 save_state(dc, cpu_cond);
4162 r_const = tcg_const_i32(7);
4163 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4164 tcg_temp_free_i32(r_const);
4165 gen_address_mask(dc, cpu_addr);
4166 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4167 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4168 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4169 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4170 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4171 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4172 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4174 break;
4175 case 0x9: /* ldsb, load signed byte */
4176 gen_address_mask(dc, cpu_addr);
4177 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4178 break;
4179 case 0xa: /* ldsh, load signed halfword */
4180 gen_address_mask(dc, cpu_addr);
4181 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4182 break;
4183 case 0xd: /* ldstub -- XXX: should be atomically */
4185 TCGv r_const;
4187 gen_address_mask(dc, cpu_addr);
4188 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4189 r_const = tcg_const_tl(0xff);
4190 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4191 tcg_temp_free(r_const);
4193 break;
4194 case 0x0f: /* swap, swap register with memory. Also
4195 atomically */
4196 CHECK_IU_FEATURE(dc, SWAP);
4197 gen_movl_reg_TN(rd, cpu_val);
4198 gen_address_mask(dc, cpu_addr);
4199 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4200 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4201 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4202 break;
4203 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4204 case 0x10: /* lda, V9 lduwa, load word alternate */
4205 #ifndef TARGET_SPARC64
4206 if (IS_IMM)
4207 goto illegal_insn;
4208 if (!supervisor(dc))
4209 goto priv_insn;
4210 #endif
4211 save_state(dc, cpu_cond);
4212 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4213 break;
4214 case 0x11: /* lduba, load unsigned byte alternate */
4215 #ifndef TARGET_SPARC64
4216 if (IS_IMM)
4217 goto illegal_insn;
4218 if (!supervisor(dc))
4219 goto priv_insn;
4220 #endif
4221 save_state(dc, cpu_cond);
4222 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4223 break;
4224 case 0x12: /* lduha, load unsigned halfword alternate */
4225 #ifndef TARGET_SPARC64
4226 if (IS_IMM)
4227 goto illegal_insn;
4228 if (!supervisor(dc))
4229 goto priv_insn;
4230 #endif
4231 save_state(dc, cpu_cond);
4232 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4233 break;
4234 case 0x13: /* ldda, load double word alternate */
4235 #ifndef TARGET_SPARC64
4236 if (IS_IMM)
4237 goto illegal_insn;
4238 if (!supervisor(dc))
4239 goto priv_insn;
4240 #endif
4241 if (rd & 1)
4242 goto illegal_insn;
4243 save_state(dc, cpu_cond);
4244 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4245 goto skip_move;
4246 case 0x19: /* ldsba, load signed byte alternate */
4247 #ifndef TARGET_SPARC64
4248 if (IS_IMM)
4249 goto illegal_insn;
4250 if (!supervisor(dc))
4251 goto priv_insn;
4252 #endif
4253 save_state(dc, cpu_cond);
4254 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4255 break;
4256 case 0x1a: /* ldsha, load signed halfword alternate */
4257 #ifndef TARGET_SPARC64
4258 if (IS_IMM)
4259 goto illegal_insn;
4260 if (!supervisor(dc))
4261 goto priv_insn;
4262 #endif
4263 save_state(dc, cpu_cond);
4264 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4265 break;
4266 case 0x1d: /* ldstuba -- XXX: should be atomically */
4267 #ifndef TARGET_SPARC64
4268 if (IS_IMM)
4269 goto illegal_insn;
4270 if (!supervisor(dc))
4271 goto priv_insn;
4272 #endif
4273 save_state(dc, cpu_cond);
4274 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4275 break;
4276 case 0x1f: /* swapa, swap reg with alt. memory. Also
4277 atomically */
4278 CHECK_IU_FEATURE(dc, SWAP);
4279 #ifndef TARGET_SPARC64
4280 if (IS_IMM)
4281 goto illegal_insn;
4282 if (!supervisor(dc))
4283 goto priv_insn;
4284 #endif
4285 save_state(dc, cpu_cond);
4286 gen_movl_reg_TN(rd, cpu_val);
4287 gen_swap_asi(cpu_val, cpu_addr, insn);
4288 break;
4290 #ifndef TARGET_SPARC64
4291 case 0x30: /* ldc */
4292 case 0x31: /* ldcsr */
4293 case 0x33: /* lddc */
4294 goto ncp_insn;
4295 #endif
4296 #endif
4297 #ifdef TARGET_SPARC64
4298 case 0x08: /* V9 ldsw */
4299 gen_address_mask(dc, cpu_addr);
4300 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4301 break;
4302 case 0x0b: /* V9 ldx */
4303 gen_address_mask(dc, cpu_addr);
4304 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4305 break;
4306 case 0x18: /* V9 ldswa */
4307 save_state(dc, cpu_cond);
4308 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4309 break;
4310 case 0x1b: /* V9 ldxa */
4311 save_state(dc, cpu_cond);
4312 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4313 break;
4314 case 0x2d: /* V9 prefetch, no effect */
4315 goto skip_move;
4316 case 0x30: /* V9 ldfa */
4317 save_state(dc, cpu_cond);
4318 gen_ldf_asi(cpu_addr, insn, 4, rd);
4319 goto skip_move;
4320 case 0x33: /* V9 lddfa */
4321 save_state(dc, cpu_cond);
4322 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4323 goto skip_move;
4324 case 0x3d: /* V9 prefetcha, no effect */
4325 goto skip_move;
4326 case 0x32: /* V9 ldqfa */
4327 CHECK_FPU_FEATURE(dc, FLOAT128);
4328 save_state(dc, cpu_cond);
4329 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4330 goto skip_move;
4331 #endif
4332 default:
4333 goto illegal_insn;
4335 gen_movl_TN_reg(rd, cpu_val);
4336 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4337 skip_move: ;
4338 #endif
4339 } else if (xop >= 0x20 && xop < 0x24) {
4340 if (gen_trap_ifnofpu(dc, cpu_cond))
4341 goto jmp_insn;
4342 save_state(dc, cpu_cond);
4343 switch (xop) {
4344 case 0x20: /* ldf, load fpreg */
4345 gen_address_mask(dc, cpu_addr);
4346 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4347 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4348 break;
4349 case 0x21: /* ldfsr, V9 ldxfsr */
4350 #ifdef TARGET_SPARC64
4351 gen_address_mask(dc, cpu_addr);
4352 if (rd == 1) {
4353 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4354 gen_helper_ldxfsr(cpu_tmp64);
4355 } else
4356 #else
4358 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4359 gen_helper_ldfsr(cpu_tmp32);
4361 #endif
4362 break;
4363 case 0x22: /* ldqf, load quad fpreg */
4365 TCGv_i32 r_const;
4367 CHECK_FPU_FEATURE(dc, FLOAT128);
4368 r_const = tcg_const_i32(dc->mem_idx);
4369 gen_helper_ldqf(cpu_addr, r_const);
4370 tcg_temp_free_i32(r_const);
4371 gen_op_store_QT0_fpr(QFPREG(rd));
4373 break;
4374 case 0x23: /* lddf, load double fpreg */
4376 TCGv_i32 r_const;
4378 r_const = tcg_const_i32(dc->mem_idx);
4379 gen_helper_lddf(cpu_addr, r_const);
4380 tcg_temp_free_i32(r_const);
4381 gen_op_store_DT0_fpr(DFPREG(rd));
4383 break;
4384 default:
4385 goto illegal_insn;
4387 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4388 xop == 0xe || xop == 0x1e) {
4389 gen_movl_reg_TN(rd, cpu_val);
4390 switch (xop) {
4391 case 0x4: /* st, store word */
4392 gen_address_mask(dc, cpu_addr);
4393 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4394 break;
4395 case 0x5: /* stb, store byte */
4396 gen_address_mask(dc, cpu_addr);
4397 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4398 break;
4399 case 0x6: /* sth, store halfword */
4400 gen_address_mask(dc, cpu_addr);
4401 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4402 break;
4403 case 0x7: /* std, store double word */
4404 if (rd & 1)
4405 goto illegal_insn;
4406 else {
4407 TCGv_i32 r_const;
4409 save_state(dc, cpu_cond);
4410 gen_address_mask(dc, cpu_addr);
4411 r_const = tcg_const_i32(7);
4412 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4413 tcg_temp_free_i32(r_const);
4414 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4415 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4416 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4418 break;
4419 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4420 case 0x14: /* sta, V9 stwa, store word alternate */
4421 #ifndef TARGET_SPARC64
4422 if (IS_IMM)
4423 goto illegal_insn;
4424 if (!supervisor(dc))
4425 goto priv_insn;
4426 #endif
4427 save_state(dc, cpu_cond);
4428 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4429 break;
4430 case 0x15: /* stba, store byte alternate */
4431 #ifndef TARGET_SPARC64
4432 if (IS_IMM)
4433 goto illegal_insn;
4434 if (!supervisor(dc))
4435 goto priv_insn;
4436 #endif
4437 save_state(dc, cpu_cond);
4438 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4439 break;
4440 case 0x16: /* stha, store halfword alternate */
4441 #ifndef TARGET_SPARC64
4442 if (IS_IMM)
4443 goto illegal_insn;
4444 if (!supervisor(dc))
4445 goto priv_insn;
4446 #endif
4447 save_state(dc, cpu_cond);
4448 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4449 break;
4450 case 0x17: /* stda, store double word alternate */
4451 #ifndef TARGET_SPARC64
4452 if (IS_IMM)
4453 goto illegal_insn;
4454 if (!supervisor(dc))
4455 goto priv_insn;
4456 #endif
4457 if (rd & 1)
4458 goto illegal_insn;
4459 else {
4460 save_state(dc, cpu_cond);
4461 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4463 break;
4464 #endif
4465 #ifdef TARGET_SPARC64
4466 case 0x0e: /* V9 stx */
4467 gen_address_mask(dc, cpu_addr);
4468 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4469 break;
4470 case 0x1e: /* V9 stxa */
4471 save_state(dc, cpu_cond);
4472 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4473 break;
4474 #endif
4475 default:
4476 goto illegal_insn;
4478 } else if (xop > 0x23 && xop < 0x28) {
4479 if (gen_trap_ifnofpu(dc, cpu_cond))
4480 goto jmp_insn;
4481 save_state(dc, cpu_cond);
4482 switch (xop) {
4483 case 0x24: /* stf, store fpreg */
4484 gen_address_mask(dc, cpu_addr);
4485 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4486 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4487 break;
4488 case 0x25: /* stfsr, V9 stxfsr */
4489 #ifdef TARGET_SPARC64
4490 gen_address_mask(dc, cpu_addr);
4491 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4492 if (rd == 1)
4493 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4494 else
4495 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4496 #else
4497 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4498 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4499 #endif
4500 break;
4501 case 0x26:
4502 #ifdef TARGET_SPARC64
4503 /* V9 stqf, store quad fpreg */
4505 TCGv_i32 r_const;
4507 CHECK_FPU_FEATURE(dc, FLOAT128);
4508 gen_op_load_fpr_QT0(QFPREG(rd));
4509 r_const = tcg_const_i32(dc->mem_idx);
4510 gen_helper_stqf(cpu_addr, r_const);
4511 tcg_temp_free_i32(r_const);
4513 break;
4514 #else /* !TARGET_SPARC64 */
4515 /* stdfq, store floating point queue */
4516 #if defined(CONFIG_USER_ONLY)
4517 goto illegal_insn;
4518 #else
4519 if (!supervisor(dc))
4520 goto priv_insn;
4521 if (gen_trap_ifnofpu(dc, cpu_cond))
4522 goto jmp_insn;
4523 goto nfq_insn;
4524 #endif
4525 #endif
4526 case 0x27: /* stdf, store double fpreg */
4528 TCGv_i32 r_const;
4530 gen_op_load_fpr_DT0(DFPREG(rd));
4531 r_const = tcg_const_i32(dc->mem_idx);
4532 gen_helper_stdf(cpu_addr, r_const);
4533 tcg_temp_free_i32(r_const);
4535 break;
4536 default:
4537 goto illegal_insn;
4539 } else if (xop > 0x33 && xop < 0x3f) {
4540 save_state(dc, cpu_cond);
4541 switch (xop) {
4542 #ifdef TARGET_SPARC64
4543 case 0x34: /* V9 stfa */
4544 gen_stf_asi(cpu_addr, insn, 4, rd);
4545 break;
4546 case 0x36: /* V9 stqfa */
4548 TCGv_i32 r_const;
4550 CHECK_FPU_FEATURE(dc, FLOAT128);
4551 r_const = tcg_const_i32(7);
4552 gen_helper_check_align(cpu_addr, r_const);
4553 tcg_temp_free_i32(r_const);
4554 gen_op_load_fpr_QT0(QFPREG(rd));
4555 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4557 break;
4558 case 0x37: /* V9 stdfa */
4559 gen_op_load_fpr_DT0(DFPREG(rd));
4560 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4561 break;
4562 case 0x3c: /* V9 casa */
4563 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4564 gen_movl_TN_reg(rd, cpu_val);
4565 break;
4566 case 0x3e: /* V9 casxa */
4567 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4568 gen_movl_TN_reg(rd, cpu_val);
4569 break;
4570 #else
4571 case 0x34: /* stc */
4572 case 0x35: /* stcsr */
4573 case 0x36: /* stdcq */
4574 case 0x37: /* stdc */
4575 goto ncp_insn;
4576 #endif
4577 default:
4578 goto illegal_insn;
4580 } else
4581 goto illegal_insn;
4583 break;
4585 /* default case for non jump instructions */
4586 if (dc->npc == DYNAMIC_PC) {
4587 dc->pc = DYNAMIC_PC;
4588 gen_op_next_insn();
4589 } else if (dc->npc == JUMP_PC) {
4590 /* we can do a static jump */
4591 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4592 dc->is_br = 1;
4593 } else {
4594 dc->pc = dc->npc;
4595 dc->npc = dc->npc + 4;
4597 jmp_insn:
4598 goto egress;
4599 illegal_insn:
4601 TCGv_i32 r_const;
4603 save_state(dc, cpu_cond);
4604 r_const = tcg_const_i32(TT_ILL_INSN);
4605 gen_helper_raise_exception(r_const);
4606 tcg_temp_free_i32(r_const);
4607 dc->is_br = 1;
4609 goto egress;
4610 unimp_flush:
4612 TCGv_i32 r_const;
4614 save_state(dc, cpu_cond);
4615 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4616 gen_helper_raise_exception(r_const);
4617 tcg_temp_free_i32(r_const);
4618 dc->is_br = 1;
4620 goto egress;
4621 #if !defined(CONFIG_USER_ONLY)
4622 priv_insn:
4624 TCGv_i32 r_const;
4626 save_state(dc, cpu_cond);
4627 r_const = tcg_const_i32(TT_PRIV_INSN);
4628 gen_helper_raise_exception(r_const);
4629 tcg_temp_free_i32(r_const);
4630 dc->is_br = 1;
4632 goto egress;
4633 #endif
4634 nfpu_insn:
4635 save_state(dc, cpu_cond);
4636 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4637 dc->is_br = 1;
4638 goto egress;
4639 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4640 nfq_insn:
4641 save_state(dc, cpu_cond);
4642 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4643 dc->is_br = 1;
4644 goto egress;
4645 #endif
4646 #ifndef TARGET_SPARC64
4647 ncp_insn:
4649 TCGv r_const;
4651 save_state(dc, cpu_cond);
4652 r_const = tcg_const_i32(TT_NCP_INSN);
4653 gen_helper_raise_exception(r_const);
4654 tcg_temp_free(r_const);
4655 dc->is_br = 1;
4657 goto egress;
4658 #endif
4659 egress:
4660 tcg_temp_free(cpu_tmp1);
4661 tcg_temp_free(cpu_tmp2);
/* Translate a block of SPARC guest code into TCG ops.
 *
 * Disassembles guest instructions starting at tb->pc (with the delay-slot
 * npc taken from tb->cs_base) until a branch is emitted, a page boundary
 * is reached, the op buffer fills, or the icount budget is exhausted.
 *
 * tb:  the TranslationBlock being filled in.
 * spc: non-zero selects "search PC" mode (used by gen_intermediate_code_pc):
 *      per-op pc/npc/icount data is recorded in the gen_opc_* arrays so a
 *      host PC can later be mapped back to a guest PC, and tb->size/icount
 *      are NOT updated.
 * env: CPU state, used for mmu index, FPU enable, breakpoints, etc.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    /* Start from a zeroed context; only the fields below are seeded. */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* On SPARC the next-PC is part of the TB key (delay slots); it is
       passed in via cs_base. */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PSTATE.AM forces 32-bit address masking on V9. */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by the per-insn translators; freed at
       exit_gen_loop below, so allocation and free must stay paired. */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    /* Local temps survive branches inside a single insn's expansion. */
    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Stop at breakpoints: sync state and emit a debug trap. */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record the guest pc/npc for every generated op so the
               restore path (gen_pc_load) can find them. */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        /* With CF_LAST_IO the final insn may do I/O: bracket it so the
           icount accounting stays exact. */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the shared temporaries allocated above (reverse order). */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        /* The block fell off the end without a branch: chain or exit. */
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the remaining op slots and export the conditional-branch
           targets for gen_pc_load. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
4806 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4808 gen_intermediate_code_internal(tb, 0, env);
4811 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4813 gen_intermediate_code_internal(tb, 1, env);
/* One-time translator initialization: register every TCG global that maps
 * onto a CPUState field (condition codes, pc/npc, windowed globals g1-g7,
 * the FP register file, and the SPARC64- or SPARC32-specific registers),
 * then register the helper functions.  Idempotent via the 'inited' guard,
 * so it is safe to call once per CPU.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        /* regwptr points at the current register window inside CPUState. */
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC V9 only: 64-bit condition codes, ASI, FP state, timers
           and hypervisor registers. */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        /* SPARC V8 only: window invalid mask. */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        /* Registers common to V8 and V9. */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* g0 is hardwired to zero, so only g1..g7 get a backing global
           (gregnames[0] is NULL accordingly). */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
4924 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4925 unsigned long searched_pc, int pc_pos, void *puc)
4927 target_ulong npc;
4928 env->pc = gen_opc_pc[pc_pos];
4929 npc = gen_opc_npc[pc_pos];
4930 if (npc == 1) {
4931 /* dynamic NPC: already stored */
4932 } else if (npc == 2) {
4933 /* jump PC: use 'cond' and the jump targets of the translation */
4934 if (env->cond) {
4935 env->npc = gen_opc_jump_pc[0];
4936 } else {
4937 env->npc = gen_opc_jump_pc[1];
4939 } else {
4940 env->npc = npc;
4943 /* flush pending conditional evaluations before exposing cpu state */
4944 if (CC_OP != CC_OP_FLAGS) {
4945 helper_compute_psr();