/*
 * Retrieved from the QEMU git repository (qemu.git),
 * file: target-sparc/translate.c
 * blob: 53997ae9366a5c28ddfa6a973fc1e8c81d40bafd
 */
/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
#define DEBUG_DISAS

/* Markers stored in dc->pc/dc->npc when the value is not a compile-time
   constant. */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 struct TranslationBlock *tb;
80 sparc_def_t *def;
81 } DisasContext;
// This function uses non-native bit order: bit 0 is the MSB (big-endian
// numbering as used in the instruction format diagrams).
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map an architectural FP register number to an index into cpu_fpr[].
   On sparc64, double/quad register numbers encode bit 5 in bit 0. */
#ifdef TARGET_SPARC64
#define FFPREG(r) (r)
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define FFPREG(r) (r)
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Trap number masks for the Tcc instruction. */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X to a full 32-bit signed value.
   The left shift is performed on an unsigned type: left-shifting into the
   sign bit of a signed int is undefined behaviour in C. */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int32_t)((uint32_t)x << len) >> len;
}

/* Instruction bit 13 selects the immediate form of the second operand. */
#define IS_IMM (insn & (1<<13))
115 /* floating point registers moves */
116 static void gen_op_load_fpr_DT0(unsigned int src)
118 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
119 offsetof(CPU_DoubleU, l.upper));
120 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
121 offsetof(CPU_DoubleU, l.lower));
124 static void gen_op_load_fpr_DT1(unsigned int src)
126 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
127 offsetof(CPU_DoubleU, l.upper));
128 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
129 offsetof(CPU_DoubleU, l.lower));
132 static void gen_op_store_DT0_fpr(unsigned int dst)
134 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
135 offsetof(CPU_DoubleU, l.upper));
136 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
137 offsetof(CPU_DoubleU, l.lower));
140 static void gen_op_load_fpr_QT0(unsigned int src)
142 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
143 offsetof(CPU_QuadU, l.upmost));
144 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
145 offsetof(CPU_QuadU, l.upper));
146 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.lower));
148 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
149 offsetof(CPU_QuadU, l.lowest));
152 static void gen_op_load_fpr_QT1(unsigned int src)
154 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
155 offsetof(CPU_QuadU, l.upmost));
156 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
157 offsetof(CPU_QuadU, l.upper));
158 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.lower));
160 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
161 offsetof(CPU_QuadU, l.lowest));
164 static void gen_op_store_QT0_fpr(unsigned int dst)
166 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
167 offsetof(CPU_QuadU, l.upmost));
168 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
169 offsetof(CPU_QuadU, l.upper));
170 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.lower));
172 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
173 offsetof(CPU_QuadU, l.lowest));
/* moves */
/* Privilege-level predicates on the decoder context.  mem_idx encodes the
   current mode: 0 user, 1 supervisor, 2 hypervisor. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

/* Whether 32-bit address masking (PSTATE.AM) must be applied; always on
   for 32-bit ABI user emulation. */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
198 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
200 #ifdef TARGET_SPARC64
201 if (AM_CHECK(dc))
202 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
203 #endif
206 static inline void gen_movl_reg_TN(int reg, TCGv tn)
208 if (reg == 0)
209 tcg_gen_movi_tl(tn, 0);
210 else if (reg < 8)
211 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
212 else {
213 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217 static inline void gen_movl_TN_reg(int reg, TCGv tn)
219 if (reg == 0)
220 return;
221 else if (reg < 8)
222 tcg_gen_mov_tl(cpu_gregs[reg], tn);
223 else {
224 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
228 static inline void gen_goto_tb(DisasContext *s, int tb_num,
229 target_ulong pc, target_ulong npc)
231 TranslationBlock *tb;
233 tb = s->tb;
234 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
235 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
236 /* jump to same page: we can use a direct jump */
237 tcg_gen_goto_tb(tb_num);
238 tcg_gen_movi_tl(cpu_pc, pc);
239 tcg_gen_movi_tl(cpu_npc, npc);
240 tcg_gen_exit_tb((long)tb + tb_num);
241 } else {
242 /* jump to another page: currently not optimized */
243 tcg_gen_movi_tl(cpu_pc, pc);
244 tcg_gen_movi_tl(cpu_npc, npc);
245 tcg_gen_exit_tb(0);
249 // XXX suboptimal
250 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
252 tcg_gen_extu_i32_tl(reg, src);
253 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
254 tcg_gen_andi_tl(reg, reg, 0x1);
257 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
259 tcg_gen_extu_i32_tl(reg, src);
260 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
261 tcg_gen_andi_tl(reg, reg, 0x1);
264 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
266 tcg_gen_extu_i32_tl(reg, src);
267 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
268 tcg_gen_andi_tl(reg, reg, 0x1);
271 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
273 tcg_gen_extu_i32_tl(reg, src);
274 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
275 tcg_gen_andi_tl(reg, reg, 0x1);
278 static inline void gen_cc_clear_icc(void)
280 tcg_gen_movi_i32(cpu_psr, 0);
283 #ifdef TARGET_SPARC64
284 static inline void gen_cc_clear_xcc(void)
286 tcg_gen_movi_i32(cpu_xcc, 0);
288 #endif
290 /* old op:
291 if (!T0)
292 env->psr |= PSR_ZERO;
293 if ((int32_t) T0 < 0)
294 env->psr |= PSR_NEG;
296 static inline void gen_cc_NZ_icc(TCGv dst)
298 TCGv r_temp;
299 int l1, l2;
301 l1 = gen_new_label();
302 l2 = gen_new_label();
303 r_temp = tcg_temp_new();
304 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
305 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
306 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
307 gen_set_label(l1);
308 tcg_gen_ext32s_tl(r_temp, dst);
309 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
310 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
311 gen_set_label(l2);
312 tcg_temp_free(r_temp);
315 #ifdef TARGET_SPARC64
316 static inline void gen_cc_NZ_xcc(TCGv dst)
318 int l1, l2;
320 l1 = gen_new_label();
321 l2 = gen_new_label();
322 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
323 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
324 gen_set_label(l1);
325 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
326 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
327 gen_set_label(l2);
329 #endif
331 /* old op:
332 if (T0 < src1)
333 env->psr |= PSR_CARRY;
335 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
337 TCGv r_temp1, r_temp2;
338 int l1;
340 l1 = gen_new_label();
341 r_temp1 = tcg_temp_new();
342 r_temp2 = tcg_temp_new();
343 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
344 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
345 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
346 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
347 gen_set_label(l1);
348 tcg_temp_free(r_temp1);
349 tcg_temp_free(r_temp2);
352 #ifdef TARGET_SPARC64
353 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
355 int l1;
357 l1 = gen_new_label();
358 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
359 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
360 gen_set_label(l1);
362 #endif
364 /* old op:
365 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
366 env->psr |= PSR_OVF;
368 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
370 TCGv r_temp;
372 r_temp = tcg_temp_new();
373 tcg_gen_xor_tl(r_temp, src1, src2);
374 tcg_gen_not_tl(r_temp, r_temp);
375 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
376 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
377 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
378 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
379 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
380 tcg_temp_free(r_temp);
381 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
384 #ifdef TARGET_SPARC64
385 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
387 TCGv r_temp;
389 r_temp = tcg_temp_new();
390 tcg_gen_xor_tl(r_temp, src1, src2);
391 tcg_gen_not_tl(r_temp, r_temp);
392 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
393 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
394 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
395 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
396 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
397 tcg_temp_free(r_temp);
398 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
400 #endif
402 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
404 TCGv r_temp;
405 TCGv_i32 r_const;
406 int l1;
408 l1 = gen_new_label();
410 r_temp = tcg_temp_new();
411 tcg_gen_xor_tl(r_temp, src1, src2);
412 tcg_gen_not_tl(r_temp, r_temp);
413 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
414 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
415 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
416 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
417 r_const = tcg_const_i32(TT_TOVF);
418 gen_helper_raise_exception(r_const);
419 tcg_temp_free_i32(r_const);
420 gen_set_label(l1);
421 tcg_temp_free(r_temp);
424 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
426 int l1;
428 l1 = gen_new_label();
429 tcg_gen_or_tl(cpu_tmp0, src1, src2);
430 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
431 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
432 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
433 gen_set_label(l1);
436 static inline void gen_tag_tv(TCGv src1, TCGv src2)
438 int l1;
439 TCGv_i32 r_const;
441 l1 = gen_new_label();
442 tcg_gen_or_tl(cpu_tmp0, src1, src2);
443 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
444 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
445 r_const = tcg_const_i32(TT_TOVF);
446 gen_helper_raise_exception(r_const);
447 tcg_temp_free_i32(r_const);
448 gen_set_label(l1);
451 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
453 tcg_gen_mov_tl(cpu_cc_src, src1);
454 tcg_gen_mov_tl(cpu_cc_src2, src2);
455 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
456 gen_cc_clear_icc();
457 gen_cc_NZ_icc(cpu_cc_dst);
458 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
459 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
460 #ifdef TARGET_SPARC64
461 gen_cc_clear_xcc();
462 gen_cc_NZ_xcc(cpu_cc_dst);
463 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
464 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
465 #endif
466 tcg_gen_mov_tl(dst, cpu_cc_dst);
469 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
471 tcg_gen_mov_tl(cpu_cc_src, src1);
472 tcg_gen_mov_tl(cpu_cc_src2, src2);
473 gen_mov_reg_C(cpu_tmp0, cpu_psr);
474 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
475 gen_cc_clear_icc();
476 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
477 #ifdef TARGET_SPARC64
478 gen_cc_clear_xcc();
479 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
480 #endif
481 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
482 gen_cc_NZ_icc(cpu_cc_dst);
483 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
484 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
485 #ifdef TARGET_SPARC64
486 gen_cc_NZ_xcc(cpu_cc_dst);
487 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
488 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
489 #endif
490 tcg_gen_mov_tl(dst, cpu_cc_dst);
493 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
495 tcg_gen_mov_tl(cpu_cc_src, src1);
496 tcg_gen_mov_tl(cpu_cc_src2, src2);
497 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
498 gen_cc_clear_icc();
499 gen_cc_NZ_icc(cpu_cc_dst);
500 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
501 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
503 #ifdef TARGET_SPARC64
504 gen_cc_clear_xcc();
505 gen_cc_NZ_xcc(cpu_cc_dst);
506 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
507 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
508 #endif
509 tcg_gen_mov_tl(dst, cpu_cc_dst);
512 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
514 tcg_gen_mov_tl(cpu_cc_src, src1);
515 tcg_gen_mov_tl(cpu_cc_src2, src2);
516 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
517 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
519 gen_cc_clear_icc();
520 gen_cc_NZ_icc(cpu_cc_dst);
521 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
522 #ifdef TARGET_SPARC64
523 gen_cc_clear_xcc();
524 gen_cc_NZ_xcc(cpu_cc_dst);
525 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
526 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
527 #endif
528 tcg_gen_mov_tl(dst, cpu_cc_dst);
531 /* old op:
532 if (src1 < T1)
533 env->psr |= PSR_CARRY;
535 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
537 TCGv r_temp1, r_temp2;
538 int l1;
540 l1 = gen_new_label();
541 r_temp1 = tcg_temp_new();
542 r_temp2 = tcg_temp_new();
543 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
544 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
545 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
546 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
547 gen_set_label(l1);
548 tcg_temp_free(r_temp1);
549 tcg_temp_free(r_temp2);
552 #ifdef TARGET_SPARC64
553 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
555 int l1;
557 l1 = gen_new_label();
558 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
559 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
560 gen_set_label(l1);
562 #endif
564 /* old op:
565 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
566 env->psr |= PSR_OVF;
568 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
570 TCGv r_temp;
572 r_temp = tcg_temp_new();
573 tcg_gen_xor_tl(r_temp, src1, src2);
574 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
575 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
576 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
577 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
578 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
579 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
580 tcg_temp_free(r_temp);
583 #ifdef TARGET_SPARC64
584 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
586 TCGv r_temp;
588 r_temp = tcg_temp_new();
589 tcg_gen_xor_tl(r_temp, src1, src2);
590 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
591 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
592 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
593 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
594 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
595 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
596 tcg_temp_free(r_temp);
598 #endif
600 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
602 TCGv r_temp;
603 TCGv_i32 r_const;
604 int l1;
606 l1 = gen_new_label();
608 r_temp = tcg_temp_new();
609 tcg_gen_xor_tl(r_temp, src1, src2);
610 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
611 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
612 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
613 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
614 r_const = tcg_const_i32(TT_TOVF);
615 gen_helper_raise_exception(r_const);
616 tcg_temp_free_i32(r_const);
617 gen_set_label(l1);
618 tcg_temp_free(r_temp);
621 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
623 tcg_gen_mov_tl(cpu_cc_src, src1);
624 tcg_gen_mov_tl(cpu_cc_src2, src2);
625 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
626 gen_cc_clear_icc();
627 gen_cc_NZ_icc(cpu_cc_dst);
628 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
629 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
630 #ifdef TARGET_SPARC64
631 gen_cc_clear_xcc();
632 gen_cc_NZ_xcc(cpu_cc_dst);
633 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
634 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
635 #endif
636 tcg_gen_mov_tl(dst, cpu_cc_dst);
639 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
641 tcg_gen_mov_tl(cpu_cc_src, src1);
642 tcg_gen_mov_tl(cpu_cc_src2, src2);
643 gen_mov_reg_C(cpu_tmp0, cpu_psr);
644 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
645 gen_cc_clear_icc();
646 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
647 #ifdef TARGET_SPARC64
648 gen_cc_clear_xcc();
649 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
650 #endif
651 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
652 gen_cc_NZ_icc(cpu_cc_dst);
653 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
654 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
655 #ifdef TARGET_SPARC64
656 gen_cc_NZ_xcc(cpu_cc_dst);
657 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
658 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 #endif
660 tcg_gen_mov_tl(dst, cpu_cc_dst);
663 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
665 tcg_gen_mov_tl(cpu_cc_src, src1);
666 tcg_gen_mov_tl(cpu_cc_src2, src2);
667 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
668 gen_cc_clear_icc();
669 gen_cc_NZ_icc(cpu_cc_dst);
670 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
671 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
672 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
673 #ifdef TARGET_SPARC64
674 gen_cc_clear_xcc();
675 gen_cc_NZ_xcc(cpu_cc_dst);
676 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
677 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
678 #endif
679 tcg_gen_mov_tl(dst, cpu_cc_dst);
682 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
684 tcg_gen_mov_tl(cpu_cc_src, src1);
685 tcg_gen_mov_tl(cpu_cc_src2, src2);
686 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
687 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
688 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
689 gen_cc_clear_icc();
690 gen_cc_NZ_icc(cpu_cc_dst);
691 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
692 #ifdef TARGET_SPARC64
693 gen_cc_clear_xcc();
694 gen_cc_NZ_xcc(cpu_cc_dst);
695 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
696 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
697 #endif
698 tcg_gen_mov_tl(dst, cpu_cc_dst);
701 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
703 TCGv r_temp;
704 int l1;
706 l1 = gen_new_label();
707 r_temp = tcg_temp_new();
709 /* old op:
710 if (!(env->y & 1))
711 T1 = 0;
713 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
714 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
715 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
716 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
717 tcg_gen_movi_tl(cpu_cc_src2, 0);
718 gen_set_label(l1);
720 // b2 = T0 & 1;
721 // env->y = (b2 << 31) | (env->y >> 1);
722 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
723 tcg_gen_shli_tl(r_temp, r_temp, 31);
724 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
725 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
726 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
727 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
729 // b1 = N ^ V;
730 gen_mov_reg_N(cpu_tmp0, cpu_psr);
731 gen_mov_reg_V(r_temp, cpu_psr);
732 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
733 tcg_temp_free(r_temp);
735 // T0 = (b1 << 31) | (T0 >> 1);
736 // src1 = T0;
737 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
738 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
739 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
741 /* do addition and update flags */
742 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
744 gen_cc_clear_icc();
745 gen_cc_NZ_icc(cpu_cc_dst);
746 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
747 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
748 tcg_gen_mov_tl(dst, cpu_cc_dst);
751 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
753 TCGv_i64 r_temp, r_temp2;
755 r_temp = tcg_temp_new_i64();
756 r_temp2 = tcg_temp_new_i64();
758 tcg_gen_extu_tl_i64(r_temp, src2);
759 tcg_gen_extu_tl_i64(r_temp2, src1);
760 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
762 tcg_gen_shri_i64(r_temp, r_temp2, 32);
763 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
764 tcg_temp_free_i64(r_temp);
765 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
766 #ifdef TARGET_SPARC64
767 tcg_gen_mov_i64(dst, r_temp2);
768 #else
769 tcg_gen_trunc_i64_tl(dst, r_temp2);
770 #endif
771 tcg_temp_free_i64(r_temp2);
774 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
776 TCGv_i64 r_temp, r_temp2;
778 r_temp = tcg_temp_new_i64();
779 r_temp2 = tcg_temp_new_i64();
781 tcg_gen_ext_tl_i64(r_temp, src2);
782 tcg_gen_ext_tl_i64(r_temp2, src1);
783 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
785 tcg_gen_shri_i64(r_temp, r_temp2, 32);
786 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
787 tcg_temp_free_i64(r_temp);
788 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
789 #ifdef TARGET_SPARC64
790 tcg_gen_mov_i64(dst, r_temp2);
791 #else
792 tcg_gen_trunc_i64_tl(dst, r_temp2);
793 #endif
794 tcg_temp_free_i64(r_temp2);
#ifdef TARGET_SPARC64
/* Raise a division-by-zero trap if DIVISOR is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* 64-bit signed divide (SDIVX); INT64_MIN / -1 yields INT64_MIN rather
   than executing the (host-undefined) overflowing division. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
830 static inline void gen_op_div_cc(TCGv dst)
832 int l1;
834 tcg_gen_mov_tl(cpu_cc_dst, dst);
835 gen_cc_clear_icc();
836 gen_cc_NZ_icc(cpu_cc_dst);
837 l1 = gen_new_label();
838 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
839 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
840 gen_set_label(l1);
843 static inline void gen_op_logic_cc(TCGv dst)
845 tcg_gen_mov_tl(cpu_cc_dst, dst);
847 gen_cc_clear_icc();
848 gen_cc_NZ_icc(cpu_cc_dst);
849 #ifdef TARGET_SPARC64
850 gen_cc_clear_xcc();
851 gen_cc_NZ_xcc(cpu_cc_dst);
852 #endif
855 // 1
856 static inline void gen_op_eval_ba(TCGv dst)
858 tcg_gen_movi_tl(dst, 1);
861 // Z
862 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
864 gen_mov_reg_Z(dst, src);
867 // Z | (N ^ V)
868 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
870 gen_mov_reg_N(cpu_tmp0, src);
871 gen_mov_reg_V(dst, src);
872 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
873 gen_mov_reg_Z(cpu_tmp0, src);
874 tcg_gen_or_tl(dst, dst, cpu_tmp0);
877 // N ^ V
878 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
880 gen_mov_reg_V(cpu_tmp0, src);
881 gen_mov_reg_N(dst, src);
882 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
885 // C | Z
886 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
888 gen_mov_reg_Z(cpu_tmp0, src);
889 gen_mov_reg_C(dst, src);
890 tcg_gen_or_tl(dst, dst, cpu_tmp0);
893 // C
894 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
896 gen_mov_reg_C(dst, src);
899 // V
900 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
902 gen_mov_reg_V(dst, src);
905 // 0
906 static inline void gen_op_eval_bn(TCGv dst)
908 tcg_gen_movi_tl(dst, 0);
911 // N
912 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
914 gen_mov_reg_N(dst, src);
917 // !Z
918 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
920 gen_mov_reg_Z(dst, src);
921 tcg_gen_xori_tl(dst, dst, 0x1);
924 // !(Z | (N ^ V))
925 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
927 gen_mov_reg_N(cpu_tmp0, src);
928 gen_mov_reg_V(dst, src);
929 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
930 gen_mov_reg_Z(cpu_tmp0, src);
931 tcg_gen_or_tl(dst, dst, cpu_tmp0);
932 tcg_gen_xori_tl(dst, dst, 0x1);
935 // !(N ^ V)
936 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
938 gen_mov_reg_V(cpu_tmp0, src);
939 gen_mov_reg_N(dst, src);
940 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
941 tcg_gen_xori_tl(dst, dst, 0x1);
944 // !(C | Z)
945 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
947 gen_mov_reg_Z(cpu_tmp0, src);
948 gen_mov_reg_C(dst, src);
949 tcg_gen_or_tl(dst, dst, cpu_tmp0);
950 tcg_gen_xori_tl(dst, dst, 0x1);
953 // !C
954 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
956 gen_mov_reg_C(dst, src);
957 tcg_gen_xori_tl(dst, dst, 0x1);
960 // !N
961 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
963 gen_mov_reg_N(dst, src);
964 tcg_gen_xori_tl(dst, dst, 0x1);
967 // !V
968 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
970 gen_mov_reg_V(dst, src);
971 tcg_gen_xori_tl(dst, dst, 0x1);
975 FPSR bit field FCC1 | FCC0:
979 3 unordered
981 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
982 unsigned int fcc_offset)
984 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
985 tcg_gen_andi_tl(reg, reg, 0x1);
988 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
989 unsigned int fcc_offset)
991 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
992 tcg_gen_andi_tl(reg, reg, 0x1);
995 // !0: FCC0 | FCC1
996 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
997 unsigned int fcc_offset)
999 gen_mov_reg_FCC0(dst, src, fcc_offset);
1000 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1001 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1004 // 1 or 2: FCC0 ^ FCC1
1005 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1006 unsigned int fcc_offset)
1008 gen_mov_reg_FCC0(dst, src, fcc_offset);
1009 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1010 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1013 // 1 or 3: FCC0
1014 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1020 // 1: FCC0 & !FCC1
1021 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1022 unsigned int fcc_offset)
1024 gen_mov_reg_FCC0(dst, src, fcc_offset);
1025 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1026 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1027 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1030 // 2 or 3: FCC1
1031 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1032 unsigned int fcc_offset)
1034 gen_mov_reg_FCC1(dst, src, fcc_offset);
1037 // 2: !FCC0 & FCC1
1038 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1039 unsigned int fcc_offset)
1041 gen_mov_reg_FCC0(dst, src, fcc_offset);
1042 tcg_gen_xori_tl(dst, dst, 0x1);
1043 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1044 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1047 // 3: FCC0 & FCC1
1048 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1049 unsigned int fcc_offset)
1051 gen_mov_reg_FCC0(dst, src, fcc_offset);
1052 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1053 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1056 // 0: !(FCC0 | FCC1)
1057 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1058 unsigned int fcc_offset)
1060 gen_mov_reg_FCC0(dst, src, fcc_offset);
1061 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1062 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1063 tcg_gen_xori_tl(dst, dst, 0x1);
1066 // 0 or 3: !(FCC0 ^ FCC1)
1067 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1068 unsigned int fcc_offset)
1070 gen_mov_reg_FCC0(dst, src, fcc_offset);
1071 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1072 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1073 tcg_gen_xori_tl(dst, dst, 0x1);
1076 // 0 or 2: !FCC0
1077 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1078 unsigned int fcc_offset)
1080 gen_mov_reg_FCC0(dst, src, fcc_offset);
1081 tcg_gen_xori_tl(dst, dst, 0x1);
1084 // !1: !(FCC0 & !FCC1)
1085 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1086 unsigned int fcc_offset)
1088 gen_mov_reg_FCC0(dst, src, fcc_offset);
1089 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1090 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1091 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1092 tcg_gen_xori_tl(dst, dst, 0x1);
1095 // 0 or 1: !FCC1
1096 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1097 unsigned int fcc_offset)
1099 gen_mov_reg_FCC1(dst, src, fcc_offset);
1100 tcg_gen_xori_tl(dst, dst, 0x1);
1103 // !2: !(!FCC0 & FCC1)
1104 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1105 unsigned int fcc_offset)
1107 gen_mov_reg_FCC0(dst, src, fcc_offset);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1110 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1111 tcg_gen_xori_tl(dst, dst, 0x1);
1114 // !3: !(FCC0 & FCC1)
1115 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1116 unsigned int fcc_offset)
1118 gen_mov_reg_FCC0(dst, src, fcc_offset);
1119 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1120 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1121 tcg_gen_xori_tl(dst, dst, 0x1);
1124 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1125 target_ulong pc2, TCGv r_cond)
1127 int l1;
1129 l1 = gen_new_label();
1131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1133 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1135 gen_set_label(l1);
1136 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1139 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1140 target_ulong pc2, TCGv r_cond)
1142 int l1;
1144 l1 = gen_new_label();
1146 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1148 gen_goto_tb(dc, 0, pc2, pc1);
1150 gen_set_label(l1);
1151 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a pending conditional npc into cpu_npc at runtime:
   cpu_npc = r_cond ? npc1 : npc2.  Used to resolve JUMP_PC state.  */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);     /* condition true */
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);     /* condition false */
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
/* If npc is in the two-valued JUMP_PC state, resolve it into cpu_npc now
   (the condition register is about to be clobbered) and downgrade npc to
   DYNAMIC_PC.  */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
/* Ensure cpu_npc holds the correct next-PC value: resolve a pending
   JUMP_PC, or store a static npc; an already-DYNAMIC npc needs nothing.  */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
/* Flush the translation-time pc/npc into the CPU state registers, e.g.
   before raising an exception or calling a helper that may long-jump.  */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
/* Advance pc to npc (delay-slot semantics).  A JUMP_PC or DYNAMIC_PC npc
   forces a runtime copy and leaves pc dynamic; a static npc is folded at
   translation time.  */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
/* Emit the default instruction advance: pc = npc; npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
/* Evaluate integer condition code 'cond' (Bicc/BPcc encoding, 0x0-0xf)
   against icc (cc == 0) or, on SPARC64, xcc (cc != 0), leaving a 0/1
   result in r_dst.  */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;    /* 32-bit SPARC only has icc */
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);      /* never */
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);      /* always */
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate FP condition 'cond' (FBfcc/FBPfcc encoding, 0x0-0xf) against
   FSR condition-code field fcc<cc>, leaving a 0/1 result in r_dst.
   'offset' is the bit position of the selected fcc field inside cpu_fsr.  */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;             /* fcc0 lives at FSR bits 11:10 (pre-shifted) */
        break;
    case 0x1:
        offset = 32 - 10;       /* fcc1 at FSR bits 33:32 */
        break;
    case 0x2:
        offset = 34 - 10;       /* fcc2 at FSR bits 35:34 */
        break;
    case 0x3:
        offset = 36 - 10;       /* fcc3 at FSR bits 37:36 */
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);  /* never */
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);  /* always */
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
/* Map BPr rcond encodings (1..7; 0 and 4 are reserved) to the TCG
   condition that makes the branch NOT taken — gen_cond_reg branches
   over the "set to 1" when the inverted condition holds.  */
static const int gen_tcg_cond_reg[8] = {
    0,                  /* rcond 0: reserved */
    TCG_COND_NE,        /* BRZ   -> skip when != 0 */
    TCG_COND_GT,        /* BRLEZ -> skip when >  0 */
    TCG_COND_GE,        /* BRLZ  -> skip when >= 0 */
    0,                  /* rcond 4: reserved */
    TCG_COND_EQ,        /* BRNZ  -> skip when == 0 */
    TCG_COND_LE,        /* BRGZ  -> skip when <= 0 */
    TCG_COND_LT,        /* BRGEZ -> skip when <  0 */
};

/* Evaluate a register-based branch condition: r_dst = (r_src satisfies
   rcond 'cond') ? 1 : 0, using the inverted-condition table above.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  Handles the
   static cases (never/always, with and without the annul bit) at
   translation time; conditional cases record the two possible next PCs
   in jump_pc[] and set npc = JUMP_PC for later resolution.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;        /* taken */
            dc->jump_pc[1] = dc->npc + 4;   /* not taken */
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch but evaluates the condition against the FSR
   fcc field via gen_fcond.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;        /* taken */
            dc->jump_pc[1] = dc->npc + 4;   /* not taken */
            dc->npc = JUMP_PC;
        }
    }
}
1459 #ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 register-based conditional branch (BPr): the condition
   is a test of r_reg against zero.  BPr has no "never/always" encodings,
   so the condition is always evaluated at runtime.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;        /* taken */
        dc->jump_pc[1] = dc->npc + 4;   /* not taken */
        dc->npc = JUMP_PC;
    }
}
/* Single-precision FP compare, dispatching on the target fcc field.  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}
/* Double-precision FP compare (operands in DT0/DT1), dispatching on the
   target fcc field.  */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}
/* Quad-precision FP compare (operands in QT0/QT1), dispatching on the
   target fcc field.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
/* Single-precision FP compare-and-except (FCMPEs traps on unordered),
   dispatching on the target fcc field.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}
/* Double-precision FP compare-and-except, dispatching on the target
   fcc field.  */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
/* Quad-precision FP compare-and-except, dispatching on the target
   fcc field.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1588 #else
/* Pre-V9 SPARC has a single fcc field, so the fccno argument is ignored
   and each compare maps directly onto its helper.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1619 #endif
/* Raise an FP exception with the given FSR trap-type flags: clear the
   old FTT field, OR in the new flags, then raise TT_FP_EXCP.  */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
/* If the FPU is disabled (system emulation only), raise TT_NFPU_INSN and
   end the TB.  Returns 1 when the trap was emitted so the caller must
   abandon the instruction, 0 otherwise.  */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);     /* pc/npc must be valid for the trap */
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
/* Clear the FSR current-exception (cexc) and trap-type (ftt) fields
   before executing an FP operation.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
/* Reset softfloat's accumulated exception flags via a helper call.  */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1659 /* asi moves */
1660 #ifdef TARGET_SPARC64
/* Return a TCGv_i32 holding the ASI for a V9 load/store-alternate insn:
   the %asi register when the immediate form is used, otherwise the
   ASI field encoded in the instruction.  Caller frees the temp.  */
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
/* V9: load 'size' bytes from an alternate address space into dst,
   sign-extending when 'sign' is non-zero.  */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9: store 'size' bytes of src to an alternate address space.  */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9: FP load from an alternate address space into FP register rd.  */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9: FP store of FP register rd to an alternate address space.  */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9 SWAPA: atomically-in-emulation exchange dst with the 32-bit word at
   addr in the alternate space (load old value, store new, return old).  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);   /* dst <- previous memory value */
}
/* V9 LDDA: doubleword alternate-space load; the helper writes the
   register pair rd/rd+1 itself.  */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
/* V9 STDA: doubleword alternate-space store; concatenate rd+1 (low) with
   hi (rd, high) into a 64-bit value and store it in one access.  */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* V9 CASA: 32-bit compare-and-swap in an alternate space; compares
   register rd with memory at addr, swapping val2 in on match.  */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
/* V9 CASXA: 64-bit compare-and-swap in an alternate space.  */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1791 #elif !defined(CONFIG_USER_ONLY)
1793 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1794 int sign)
1796 TCGv_i32 r_asi, r_size, r_sign;
1798 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1799 r_size = tcg_const_i32(size);
1800 r_sign = tcg_const_i32(sign);
1801 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1802 tcg_temp_free(r_sign);
1803 tcg_temp_free(r_size);
1804 tcg_temp_free(r_asi);
1805 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1808 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1810 TCGv_i32 r_asi, r_size;
1812 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1813 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1814 r_size = tcg_const_i32(size);
1815 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1816 tcg_temp_free(r_size);
1817 tcg_temp_free(r_asi);
1820 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1822 TCGv_i32 r_asi, r_size, r_sign;
1823 TCGv_i64 r_val;
1825 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1826 r_size = tcg_const_i32(4);
1827 r_sign = tcg_const_i32(0);
1828 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1829 tcg_temp_free(r_sign);
1830 r_val = tcg_temp_new_i64();
1831 tcg_gen_extu_tl_i64(r_val, dst);
1832 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1833 tcg_temp_free_i64(r_val);
1834 tcg_temp_free(r_size);
1835 tcg_temp_free(r_asi);
1836 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1839 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1841 TCGv_i32 r_asi, r_size, r_sign;
1843 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1844 r_size = tcg_const_i32(8);
1845 r_sign = tcg_const_i32(0);
1846 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1847 tcg_temp_free(r_sign);
1848 tcg_temp_free(r_size);
1849 tcg_temp_free(r_asi);
1850 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1851 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1852 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1853 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1854 gen_movl_TN_reg(rd, hi);
1857 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1859 TCGv_i32 r_asi, r_size;
1861 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1862 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1863 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1864 r_size = tcg_const_i32(8);
1865 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1866 tcg_temp_free(r_size);
1867 tcg_temp_free(r_asi);
1869 #endif
1871 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at addr into dst, then store 0xff to the same
   location (the SPARC atomic test-and-set primitive).  */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
1887 #endif
/* Resolve the rs1 field of an instruction to a TCGv: constant 0 for %g0,
   the global TCG register for %g1-%g7, or a load from the register
   window (via cpu_regwptr) into 'def' for windowed registers.  */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
/* Resolve the second source operand: a sign-extended 13-bit immediate
   when the i bit is set, otherwise register rs2 using the same %g0 /
   global / windowed resolution as get_src1.  */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
/* Guard macros used inside disas_sparc_insn: bail out to the
   illegal-instruction / no-FPU trap labels when the emulated CPU model
   lacks the required feature.  Only usable where those labels exist.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1931 /* before an instruction, dc->pc must be static */
1932 static void disas_sparc_insn(DisasContext * dc)
1934 unsigned int insn, opc, rs1, rs2, rd;
1936 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1937 tcg_gen_debug_insn_start(dc->pc);
1938 insn = ldl_code(dc->pc);
1939 opc = GET_FIELD(insn, 0, 1);
1941 rd = GET_FIELD(insn, 2, 6);
1943 cpu_src1 = tcg_temp_new(); // const
1944 cpu_src2 = tcg_temp_new(); // const
1946 switch (opc) {
1947 case 0: /* branches/sethi */
1949 unsigned int xop = GET_FIELD(insn, 7, 9);
1950 int32_t target;
1951 switch (xop) {
1952 #ifdef TARGET_SPARC64
1953 case 0x1: /* V9 BPcc */
1955 int cc;
1957 target = GET_FIELD_SP(insn, 0, 18);
1958 target = sign_extend(target, 18);
1959 target <<= 2;
1960 cc = GET_FIELD_SP(insn, 20, 21);
1961 if (cc == 0)
1962 do_branch(dc, target, insn, 0, cpu_cond);
1963 else if (cc == 2)
1964 do_branch(dc, target, insn, 1, cpu_cond);
1965 else
1966 goto illegal_insn;
1967 goto jmp_insn;
1969 case 0x3: /* V9 BPr */
1971 target = GET_FIELD_SP(insn, 0, 13) |
1972 (GET_FIELD_SP(insn, 20, 21) << 14);
1973 target = sign_extend(target, 16);
1974 target <<= 2;
1975 cpu_src1 = get_src1(insn, cpu_src1);
1976 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1977 goto jmp_insn;
1979 case 0x5: /* V9 FBPcc */
1981 int cc = GET_FIELD_SP(insn, 20, 21);
1982 if (gen_trap_ifnofpu(dc, cpu_cond))
1983 goto jmp_insn;
1984 target = GET_FIELD_SP(insn, 0, 18);
1985 target = sign_extend(target, 19);
1986 target <<= 2;
1987 do_fbranch(dc, target, insn, cc, cpu_cond);
1988 goto jmp_insn;
1990 #else
1991 case 0x7: /* CBN+x */
1993 goto ncp_insn;
1995 #endif
1996 case 0x2: /* BN+x */
1998 target = GET_FIELD(insn, 10, 31);
1999 target = sign_extend(target, 22);
2000 target <<= 2;
2001 do_branch(dc, target, insn, 0, cpu_cond);
2002 goto jmp_insn;
2004 case 0x6: /* FBN+x */
2006 if (gen_trap_ifnofpu(dc, cpu_cond))
2007 goto jmp_insn;
2008 target = GET_FIELD(insn, 10, 31);
2009 target = sign_extend(target, 22);
2010 target <<= 2;
2011 do_fbranch(dc, target, insn, 0, cpu_cond);
2012 goto jmp_insn;
2014 case 0x4: /* SETHI */
2015 if (rd) { // nop
2016 uint32_t value = GET_FIELD(insn, 10, 31);
2017 TCGv r_const;
2019 r_const = tcg_const_tl(value << 10);
2020 gen_movl_TN_reg(rd, r_const);
2021 tcg_temp_free(r_const);
2023 break;
2024 case 0x0: /* UNIMPL */
2025 default:
2026 goto illegal_insn;
2028 break;
2030 break;
2031 case 1:
2032 /*CALL*/ {
2033 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2034 TCGv r_const;
2036 r_const = tcg_const_tl(dc->pc);
2037 gen_movl_TN_reg(15, r_const);
2038 tcg_temp_free(r_const);
2039 target += dc->pc;
2040 gen_mov_pc_npc(dc, cpu_cond);
2041 dc->npc = target;
2043 goto jmp_insn;
2044 case 2: /* FPU & Logical Operations */
2046 unsigned int xop = GET_FIELD(insn, 7, 12);
2047 if (xop == 0x3a) { /* generate trap */
2048 int cond;
2050 cpu_src1 = get_src1(insn, cpu_src1);
2051 if (IS_IMM) {
2052 rs2 = GET_FIELD(insn, 25, 31);
2053 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2054 } else {
2055 rs2 = GET_FIELD(insn, 27, 31);
2056 if (rs2 != 0) {
2057 gen_movl_reg_TN(rs2, cpu_src2);
2058 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2059 } else
2060 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2062 cond = GET_FIELD(insn, 3, 6);
2063 if (cond == 0x8) {
2064 save_state(dc, cpu_cond);
2065 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2066 supervisor(dc))
2067 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2068 else
2069 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2070 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2071 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2072 gen_helper_raise_exception(cpu_tmp32);
2073 } else if (cond != 0) {
2074 TCGv r_cond = tcg_temp_new();
2075 int l1;
2076 #ifdef TARGET_SPARC64
2077 /* V9 icc/xcc */
2078 int cc = GET_FIELD_SP(insn, 11, 12);
2080 save_state(dc, cpu_cond);
2081 if (cc == 0)
2082 gen_cond(r_cond, 0, cond);
2083 else if (cc == 2)
2084 gen_cond(r_cond, 1, cond);
2085 else
2086 goto illegal_insn;
2087 #else
2088 save_state(dc, cpu_cond);
2089 gen_cond(r_cond, 0, cond);
2090 #endif
2091 l1 = gen_new_label();
2092 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2094 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2095 supervisor(dc))
2096 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2097 else
2098 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2099 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2100 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2101 gen_helper_raise_exception(cpu_tmp32);
2103 gen_set_label(l1);
2104 tcg_temp_free(r_cond);
2106 gen_op_next_insn();
2107 tcg_gen_exit_tb(0);
2108 dc->is_br = 1;
2109 goto jmp_insn;
2110 } else if (xop == 0x28) {
2111 rs1 = GET_FIELD(insn, 13, 17);
2112 switch(rs1) {
2113 case 0: /* rdy */
2114 #ifndef TARGET_SPARC64
2115 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2116 manual, rdy on the microSPARC
2117 II */
2118 case 0x0f: /* stbar in the SPARCv8 manual,
2119 rdy on the microSPARC II */
2120 case 0x10 ... 0x1f: /* implementation-dependent in the
2121 SPARCv8 manual, rdy on the
2122 microSPARC II */
2123 #endif
2124 gen_movl_TN_reg(rd, cpu_y);
2125 break;
2126 #ifdef TARGET_SPARC64
2127 case 0x2: /* V9 rdccr */
2128 gen_helper_rdccr(cpu_dst);
2129 gen_movl_TN_reg(rd, cpu_dst);
2130 break;
2131 case 0x3: /* V9 rdasi */
2132 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2133 gen_movl_TN_reg(rd, cpu_dst);
2134 break;
2135 case 0x4: /* V9 rdtick */
2137 TCGv_ptr r_tickptr;
2139 r_tickptr = tcg_temp_new_ptr();
2140 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2141 offsetof(CPUState, tick));
2142 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2143 tcg_temp_free_ptr(r_tickptr);
2144 gen_movl_TN_reg(rd, cpu_dst);
2146 break;
2147 case 0x5: /* V9 rdpc */
2149 TCGv r_const;
2151 r_const = tcg_const_tl(dc->pc);
2152 gen_movl_TN_reg(rd, r_const);
2153 tcg_temp_free(r_const);
2155 break;
2156 case 0x6: /* V9 rdfprs */
2157 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2158 gen_movl_TN_reg(rd, cpu_dst);
2159 break;
2160 case 0xf: /* V9 membar */
2161 break; /* no effect */
2162 case 0x13: /* Graphics Status */
2163 if (gen_trap_ifnofpu(dc, cpu_cond))
2164 goto jmp_insn;
2165 gen_movl_TN_reg(rd, cpu_gsr);
2166 break;
2167 case 0x16: /* Softint */
2168 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2169 gen_movl_TN_reg(rd, cpu_dst);
2170 break;
2171 case 0x17: /* Tick compare */
2172 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2173 break;
2174 case 0x18: /* System tick */
2176 TCGv_ptr r_tickptr;
2178 r_tickptr = tcg_temp_new_ptr();
2179 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2180 offsetof(CPUState, stick));
2181 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2182 tcg_temp_free_ptr(r_tickptr);
2183 gen_movl_TN_reg(rd, cpu_dst);
2185 break;
2186 case 0x19: /* System tick compare */
2187 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2188 break;
2189 case 0x10: /* Performance Control */
2190 case 0x11: /* Performance Instrumentation Counter */
2191 case 0x12: /* Dispatch Control */
2192 case 0x14: /* Softint set, WO */
2193 case 0x15: /* Softint clear, WO */
2194 #endif
2195 default:
2196 goto illegal_insn;
2198 #if !defined(CONFIG_USER_ONLY)
2199 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2200 #ifndef TARGET_SPARC64
2201 if (!supervisor(dc))
2202 goto priv_insn;
2203 gen_helper_rdpsr(cpu_dst);
2204 #else
2205 CHECK_IU_FEATURE(dc, HYPV);
2206 if (!hypervisor(dc))
2207 goto priv_insn;
2208 rs1 = GET_FIELD(insn, 13, 17);
2209 switch (rs1) {
2210 case 0: // hpstate
2211 // gen_op_rdhpstate();
2212 break;
2213 case 1: // htstate
2214 // gen_op_rdhtstate();
2215 break;
2216 case 3: // hintp
2217 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2218 break;
2219 case 5: // htba
2220 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2221 break;
2222 case 6: // hver
2223 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2224 break;
2225 case 31: // hstick_cmpr
2226 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2227 break;
2228 default:
2229 goto illegal_insn;
2231 #endif
2232 gen_movl_TN_reg(rd, cpu_dst);
2233 break;
2234 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2235 if (!supervisor(dc))
2236 goto priv_insn;
2237 #ifdef TARGET_SPARC64
2238 rs1 = GET_FIELD(insn, 13, 17);
2239 switch (rs1) {
2240 case 0: // tpc
2242 TCGv_ptr r_tsptr;
2244 r_tsptr = tcg_temp_new_ptr();
2245 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2246 offsetof(CPUState, tsptr));
2247 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2248 offsetof(trap_state, tpc));
2249 tcg_temp_free_ptr(r_tsptr);
2251 break;
2252 case 1: // tnpc
2254 TCGv_ptr r_tsptr;
2256 r_tsptr = tcg_temp_new_ptr();
2257 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2258 offsetof(CPUState, tsptr));
2259 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2260 offsetof(trap_state, tnpc));
2261 tcg_temp_free_ptr(r_tsptr);
2263 break;
2264 case 2: // tstate
2266 TCGv_ptr r_tsptr;
2268 r_tsptr = tcg_temp_new_ptr();
2269 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2270 offsetof(CPUState, tsptr));
2271 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2272 offsetof(trap_state, tstate));
2273 tcg_temp_free_ptr(r_tsptr);
2275 break;
2276 case 3: // tt
2278 TCGv_ptr r_tsptr;
2280 r_tsptr = tcg_temp_new_ptr();
2281 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2282 offsetof(CPUState, tsptr));
2283 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2284 offsetof(trap_state, tt));
2285 tcg_temp_free_ptr(r_tsptr);
2286 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2288 break;
2289 case 4: // tick
2291 TCGv_ptr r_tickptr;
2293 r_tickptr = tcg_temp_new_ptr();
2294 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2295 offsetof(CPUState, tick));
2296 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2297 gen_movl_TN_reg(rd, cpu_tmp0);
2298 tcg_temp_free_ptr(r_tickptr);
2300 break;
2301 case 5: // tba
2302 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2303 break;
2304 case 6: // pstate
2305 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2306 offsetof(CPUSPARCState, pstate));
2307 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2308 break;
2309 case 7: // tl
2310 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2311 offsetof(CPUSPARCState, tl));
2312 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2313 break;
2314 case 8: // pil
2315 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2316 offsetof(CPUSPARCState, psrpil));
2317 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2318 break;
2319 case 9: // cwp
2320 gen_helper_rdcwp(cpu_tmp0);
2321 break;
2322 case 10: // cansave
2323 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2324 offsetof(CPUSPARCState, cansave));
2325 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2326 break;
2327 case 11: // canrestore
2328 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2329 offsetof(CPUSPARCState, canrestore));
2330 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2331 break;
2332 case 12: // cleanwin
2333 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2334 offsetof(CPUSPARCState, cleanwin));
2335 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2336 break;
2337 case 13: // otherwin
2338 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2339 offsetof(CPUSPARCState, otherwin));
2340 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2341 break;
2342 case 14: // wstate
2343 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2344 offsetof(CPUSPARCState, wstate));
2345 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2346 break;
2347 case 16: // UA2005 gl
2348 CHECK_IU_FEATURE(dc, GL);
2349 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2350 offsetof(CPUSPARCState, gl));
2351 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2352 break;
2353 case 26: // UA2005 strand status
2354 CHECK_IU_FEATURE(dc, HYPV);
2355 if (!hypervisor(dc))
2356 goto priv_insn;
2357 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2358 break;
2359 case 31: // ver
2360 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2361 break;
2362 case 15: // fq
2363 default:
2364 goto illegal_insn;
2366 #else
2367 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2368 #endif
2369 gen_movl_TN_reg(rd, cpu_tmp0);
2370 break;
2371 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2372 #ifdef TARGET_SPARC64
2373 save_state(dc, cpu_cond);
2374 gen_helper_flushw();
2375 #else
2376 if (!supervisor(dc))
2377 goto priv_insn;
2378 gen_movl_TN_reg(rd, cpu_tbr);
2379 #endif
2380 break;
2381 #endif
2382 } else if (xop == 0x34) { /* FPU Operations */
2383 if (gen_trap_ifnofpu(dc, cpu_cond))
2384 goto jmp_insn;
2385 gen_op_clear_ieee_excp_and_FTT();
2386 rs1 = GET_FIELD(insn, 13, 17);
2387 rs2 = GET_FIELD(insn, 27, 31);
2388 xop = GET_FIELD(insn, 18, 26);
2389 switch (xop) {
2390 case 0x1: /* fmovs */
2391 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2392 break;
2393 case 0x5: /* fnegs */
2394 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2395 break;
2396 case 0x9: /* fabss */
2397 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2398 break;
2399 case 0x29: /* fsqrts */
2400 CHECK_FPU_FEATURE(dc, FSQRT);
2401 gen_clear_float_exceptions();
2402 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2403 gen_helper_check_ieee_exceptions();
2404 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2405 break;
2406 case 0x2a: /* fsqrtd */
2407 CHECK_FPU_FEATURE(dc, FSQRT);
2408 gen_op_load_fpr_DT1(DFPREG(rs2));
2409 gen_clear_float_exceptions();
2410 gen_helper_fsqrtd();
2411 gen_helper_check_ieee_exceptions();
2412 gen_op_store_DT0_fpr(DFPREG(rd));
2413 break;
2414 case 0x2b: /* fsqrtq */
2415 CHECK_FPU_FEATURE(dc, FLOAT128);
2416 gen_op_load_fpr_QT1(QFPREG(rs2));
2417 gen_clear_float_exceptions();
2418 gen_helper_fsqrtq();
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_QT0_fpr(QFPREG(rd));
2421 break;
2422 case 0x41: /* fadds */
2423 gen_clear_float_exceptions();
2424 gen_helper_fadds(cpu_tmp32,
2425 cpu_fpr[rs1], cpu_fpr[rs2]);
2426 gen_helper_check_ieee_exceptions();
2427 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2428 break;
2429 case 0x42:
2430 gen_op_load_fpr_DT0(DFPREG(rs1));
2431 gen_op_load_fpr_DT1(DFPREG(rs2));
2432 gen_clear_float_exceptions();
2433 gen_helper_faddd();
2434 gen_helper_check_ieee_exceptions();
2435 gen_op_store_DT0_fpr(DFPREG(rd));
2436 break;
2437 case 0x43: /* faddq */
2438 CHECK_FPU_FEATURE(dc, FLOAT128);
2439 gen_op_load_fpr_QT0(QFPREG(rs1));
2440 gen_op_load_fpr_QT1(QFPREG(rs2));
2441 gen_clear_float_exceptions();
2442 gen_helper_faddq();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_QT0_fpr(QFPREG(rd));
2445 break;
2446 case 0x45: /* fsubs */
2447 gen_clear_float_exceptions();
2448 gen_helper_fsubs(cpu_tmp32,
2449 cpu_fpr[rs1], cpu_fpr[rs2]);
2450 gen_helper_check_ieee_exceptions();
2451 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2452 break;
2453 case 0x46:
2454 gen_op_load_fpr_DT0(DFPREG(rs1));
2455 gen_op_load_fpr_DT1(DFPREG(rs2));
2456 gen_clear_float_exceptions();
2457 gen_helper_fsubd();
2458 gen_helper_check_ieee_exceptions();
2459 gen_op_store_DT0_fpr(DFPREG(rd));
2460 break;
2461 case 0x47: /* fsubq */
2462 CHECK_FPU_FEATURE(dc, FLOAT128);
2463 gen_op_load_fpr_QT0(QFPREG(rs1));
2464 gen_op_load_fpr_QT1(QFPREG(rs2));
2465 gen_clear_float_exceptions();
2466 gen_helper_fsubq();
2467 gen_helper_check_ieee_exceptions();
2468 gen_op_store_QT0_fpr(QFPREG(rd));
2469 break;
2470 case 0x49: /* fmuls */
2471 CHECK_FPU_FEATURE(dc, FMUL);
2472 gen_clear_float_exceptions();
2473 gen_helper_fmuls(cpu_tmp32,
2474 cpu_fpr[rs1], cpu_fpr[rs2]);
2475 gen_helper_check_ieee_exceptions();
2476 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2477 break;
2478 case 0x4a: /* fmuld */
2479 CHECK_FPU_FEATURE(dc, FMUL);
2480 gen_op_load_fpr_DT0(DFPREG(rs1));
2481 gen_op_load_fpr_DT1(DFPREG(rs2));
2482 gen_clear_float_exceptions();
2483 gen_helper_fmuld();
2484 gen_helper_check_ieee_exceptions();
2485 gen_op_store_DT0_fpr(DFPREG(rd));
2486 break;
2487 case 0x4b: /* fmulq */
2488 CHECK_FPU_FEATURE(dc, FLOAT128);
2489 CHECK_FPU_FEATURE(dc, FMUL);
2490 gen_op_load_fpr_QT0(QFPREG(rs1));
2491 gen_op_load_fpr_QT1(QFPREG(rs2));
2492 gen_clear_float_exceptions();
2493 gen_helper_fmulq();
2494 gen_helper_check_ieee_exceptions();
2495 gen_op_store_QT0_fpr(QFPREG(rd));
2496 break;
2497 case 0x4d: /* fdivs */
2498 gen_clear_float_exceptions();
2499 gen_helper_fdivs(cpu_tmp32,
2500 cpu_fpr[rs1], cpu_fpr[rs2]);
2501 gen_helper_check_ieee_exceptions();
2502 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2503 break;
2504 case 0x4e:
2505 gen_op_load_fpr_DT0(DFPREG(rs1));
2506 gen_op_load_fpr_DT1(DFPREG(rs2));
2507 gen_clear_float_exceptions();
2508 gen_helper_fdivd();
2509 gen_helper_check_ieee_exceptions();
2510 gen_op_store_DT0_fpr(DFPREG(rd));
2511 break;
2512 case 0x4f: /* fdivq */
2513 CHECK_FPU_FEATURE(dc, FLOAT128);
2514 gen_op_load_fpr_QT0(QFPREG(rs1));
2515 gen_op_load_fpr_QT1(QFPREG(rs2));
2516 gen_clear_float_exceptions();
2517 gen_helper_fdivq();
2518 gen_helper_check_ieee_exceptions();
2519 gen_op_store_QT0_fpr(QFPREG(rd));
2520 break;
2521 case 0x69: /* fsmuld */
2522 CHECK_FPU_FEATURE(dc, FSMULD);
2523 gen_clear_float_exceptions();
2524 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2525 gen_helper_check_ieee_exceptions();
2526 gen_op_store_DT0_fpr(DFPREG(rd));
2527 break;
2528 case 0x6e: /* fdmulq */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_DT0(DFPREG(rs1));
2531 gen_op_load_fpr_DT1(DFPREG(rs2));
2532 gen_clear_float_exceptions();
2533 gen_helper_fdmulq();
2534 gen_helper_check_ieee_exceptions();
2535 gen_op_store_QT0_fpr(QFPREG(rd));
2536 break;
2537 case 0xc4: /* fitos */
2538 gen_clear_float_exceptions();
2539 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2540 gen_helper_check_ieee_exceptions();
2541 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2542 break;
2543 case 0xc6: /* fdtos */
2544 gen_op_load_fpr_DT1(DFPREG(rs2));
2545 gen_clear_float_exceptions();
2546 gen_helper_fdtos(cpu_tmp32);
2547 gen_helper_check_ieee_exceptions();
2548 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2549 break;
2550 case 0xc7: /* fqtos */
2551 CHECK_FPU_FEATURE(dc, FLOAT128);
2552 gen_op_load_fpr_QT1(QFPREG(rs2));
2553 gen_clear_float_exceptions();
2554 gen_helper_fqtos(cpu_tmp32);
2555 gen_helper_check_ieee_exceptions();
2556 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2557 break;
2558 case 0xc8: /* fitod */
2559 gen_helper_fitod(cpu_fpr[rs2]);
2560 gen_op_store_DT0_fpr(DFPREG(rd));
2561 break;
2562 case 0xc9: /* fstod */
2563 gen_helper_fstod(cpu_fpr[rs2]);
2564 gen_op_store_DT0_fpr(DFPREG(rd));
2565 break;
2566 case 0xcb: /* fqtod */
2567 CHECK_FPU_FEATURE(dc, FLOAT128);
2568 gen_op_load_fpr_QT1(QFPREG(rs2));
2569 gen_clear_float_exceptions();
2570 gen_helper_fqtod();
2571 gen_helper_check_ieee_exceptions();
2572 gen_op_store_DT0_fpr(DFPREG(rd));
2573 break;
2574 case 0xcc: /* fitoq */
2575 CHECK_FPU_FEATURE(dc, FLOAT128);
2576 gen_helper_fitoq(cpu_fpr[rs2]);
2577 gen_op_store_QT0_fpr(QFPREG(rd));
2578 break;
2579 case 0xcd: /* fstoq */
2580 CHECK_FPU_FEATURE(dc, FLOAT128);
2581 gen_helper_fstoq(cpu_fpr[rs2]);
2582 gen_op_store_QT0_fpr(QFPREG(rd));
2583 break;
2584 case 0xce: /* fdtoq */
2585 CHECK_FPU_FEATURE(dc, FLOAT128);
2586 gen_op_load_fpr_DT1(DFPREG(rs2));
2587 gen_helper_fdtoq();
2588 gen_op_store_QT0_fpr(QFPREG(rd));
2589 break;
2590 case 0xd1: /* fstoi */
2591 gen_clear_float_exceptions();
2592 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2593 gen_helper_check_ieee_exceptions();
2594 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2595 break;
2596 case 0xd2: /* fdtoi */
2597 gen_op_load_fpr_DT1(DFPREG(rs2));
2598 gen_clear_float_exceptions();
2599 gen_helper_fdtoi(cpu_tmp32);
2600 gen_helper_check_ieee_exceptions();
2601 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2602 break;
2603 case 0xd3: /* fqtoi */
2604 CHECK_FPU_FEATURE(dc, FLOAT128);
2605 gen_op_load_fpr_QT1(QFPREG(rs2));
2606 gen_clear_float_exceptions();
2607 gen_helper_fqtoi(cpu_tmp32);
2608 gen_helper_check_ieee_exceptions();
2609 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2610 break;
2611 #ifdef TARGET_SPARC64
2612 case 0x2: /* V9 fmovd */
2613 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2614 cpu_fpr[DFPREG(rs2)]);
2615 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2616 cpu_fpr[DFPREG(rs2) + 1]);
2617 break;
2618 case 0x3: /* V9 fmovq */
2619 CHECK_FPU_FEATURE(dc, FLOAT128);
2620 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2621 cpu_fpr[QFPREG(rs2)]);
2622 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2623 cpu_fpr[QFPREG(rs2) + 1]);
2624 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2625 cpu_fpr[QFPREG(rs2) + 2]);
2626 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2627 cpu_fpr[QFPREG(rs2) + 3]);
2628 break;
2629 case 0x6: /* V9 fnegd */
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_helper_fnegd();
2632 gen_op_store_DT0_fpr(DFPREG(rd));
2633 break;
2634 case 0x7: /* V9 fnegq */
2635 CHECK_FPU_FEATURE(dc, FLOAT128);
2636 gen_op_load_fpr_QT1(QFPREG(rs2));
2637 gen_helper_fnegq();
2638 gen_op_store_QT0_fpr(QFPREG(rd));
2639 break;
2640 case 0xa: /* V9 fabsd */
2641 gen_op_load_fpr_DT1(DFPREG(rs2));
2642 gen_helper_fabsd();
2643 gen_op_store_DT0_fpr(DFPREG(rd));
2644 break;
2645 case 0xb: /* V9 fabsq */
2646 CHECK_FPU_FEATURE(dc, FLOAT128);
2647 gen_op_load_fpr_QT1(QFPREG(rs2));
2648 gen_helper_fabsq();
2649 gen_op_store_QT0_fpr(QFPREG(rd));
2650 break;
2651 case 0x81: /* V9 fstox */
2652 gen_clear_float_exceptions();
2653 gen_helper_fstox(cpu_fpr[rs2]);
2654 gen_helper_check_ieee_exceptions();
2655 gen_op_store_DT0_fpr(DFPREG(rd));
2656 break;
2657 case 0x82: /* V9 fdtox */
2658 gen_op_load_fpr_DT1(DFPREG(rs2));
2659 gen_clear_float_exceptions();
2660 gen_helper_fdtox();
2661 gen_helper_check_ieee_exceptions();
2662 gen_op_store_DT0_fpr(DFPREG(rd));
2663 break;
2664 case 0x83: /* V9 fqtox */
2665 CHECK_FPU_FEATURE(dc, FLOAT128);
2666 gen_op_load_fpr_QT1(QFPREG(rs2));
2667 gen_clear_float_exceptions();
2668 gen_helper_fqtox();
2669 gen_helper_check_ieee_exceptions();
2670 gen_op_store_DT0_fpr(DFPREG(rd));
2671 break;
2672 case 0x84: /* V9 fxtos */
2673 gen_op_load_fpr_DT1(DFPREG(rs2));
2674 gen_clear_float_exceptions();
2675 gen_helper_fxtos(cpu_tmp32);
2676 gen_helper_check_ieee_exceptions();
2677 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2678 break;
2679 case 0x88: /* V9 fxtod */
2680 gen_op_load_fpr_DT1(DFPREG(rs2));
2681 gen_clear_float_exceptions();
2682 gen_helper_fxtod();
2683 gen_helper_check_ieee_exceptions();
2684 gen_op_store_DT0_fpr(DFPREG(rd));
2685 break;
2686 case 0x8c: /* V9 fxtoq */
2687 CHECK_FPU_FEATURE(dc, FLOAT128);
2688 gen_op_load_fpr_DT1(DFPREG(rs2));
2689 gen_clear_float_exceptions();
2690 gen_helper_fxtoq();
2691 gen_helper_check_ieee_exceptions();
2692 gen_op_store_QT0_fpr(QFPREG(rd));
2693 break;
2694 #endif
2695 default:
2696 goto illegal_insn;
2698 } else if (xop == 0x35) { /* FPU Operations */
2699 #ifdef TARGET_SPARC64
2700 int cond;
2701 #endif
2702 if (gen_trap_ifnofpu(dc, cpu_cond))
2703 goto jmp_insn;
2704 gen_op_clear_ieee_excp_and_FTT();
2705 rs1 = GET_FIELD(insn, 13, 17);
2706 rs2 = GET_FIELD(insn, 27, 31);
2707 xop = GET_FIELD(insn, 18, 26);
2708 #ifdef TARGET_SPARC64
2709 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2710 int l1;
2712 l1 = gen_new_label();
2713 cond = GET_FIELD_SP(insn, 14, 17);
2714 cpu_src1 = get_src1(insn, cpu_src1);
2715 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2716 0, l1);
2717 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2718 gen_set_label(l1);
2719 break;
2720 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2721 int l1;
2723 l1 = gen_new_label();
2724 cond = GET_FIELD_SP(insn, 14, 17);
2725 cpu_src1 = get_src1(insn, cpu_src1);
2726 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2727 0, l1);
2728 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2729 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2730 gen_set_label(l1);
2731 break;
2732 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2733 int l1;
2735 CHECK_FPU_FEATURE(dc, FLOAT128);
2736 l1 = gen_new_label();
2737 cond = GET_FIELD_SP(insn, 14, 17);
2738 cpu_src1 = get_src1(insn, cpu_src1);
2739 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2740 0, l1);
2741 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2742 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2743 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2744 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2745 gen_set_label(l1);
2746 break;
2748 #endif
2749 switch (xop) {
2750 #ifdef TARGET_SPARC64
2751 #define FMOVSCC(fcc) \
2753 TCGv r_cond; \
2754 int l1; \
2756 l1 = gen_new_label(); \
2757 r_cond = tcg_temp_new(); \
2758 cond = GET_FIELD_SP(insn, 14, 17); \
2759 gen_fcond(r_cond, fcc, cond); \
2760 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2761 0, l1); \
2762 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2763 gen_set_label(l1); \
2764 tcg_temp_free(r_cond); \
2766 #define FMOVDCC(fcc) \
2768 TCGv r_cond; \
2769 int l1; \
2771 l1 = gen_new_label(); \
2772 r_cond = tcg_temp_new(); \
2773 cond = GET_FIELD_SP(insn, 14, 17); \
2774 gen_fcond(r_cond, fcc, cond); \
2775 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2776 0, l1); \
2777 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2778 cpu_fpr[DFPREG(rs2)]); \
2779 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2780 cpu_fpr[DFPREG(rs2) + 1]); \
2781 gen_set_label(l1); \
2782 tcg_temp_free(r_cond); \
2784 #define FMOVQCC(fcc) \
2786 TCGv r_cond; \
2787 int l1; \
2789 l1 = gen_new_label(); \
2790 r_cond = tcg_temp_new(); \
2791 cond = GET_FIELD_SP(insn, 14, 17); \
2792 gen_fcond(r_cond, fcc, cond); \
2793 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2794 0, l1); \
2795 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2796 cpu_fpr[QFPREG(rs2)]); \
2797 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2798 cpu_fpr[QFPREG(rs2) + 1]); \
2799 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2800 cpu_fpr[QFPREG(rs2) + 2]); \
2801 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2802 cpu_fpr[QFPREG(rs2) + 3]); \
2803 gen_set_label(l1); \
2804 tcg_temp_free(r_cond); \
2806 case 0x001: /* V9 fmovscc %fcc0 */
2807 FMOVSCC(0);
2808 break;
2809 case 0x002: /* V9 fmovdcc %fcc0 */
2810 FMOVDCC(0);
2811 break;
2812 case 0x003: /* V9 fmovqcc %fcc0 */
2813 CHECK_FPU_FEATURE(dc, FLOAT128);
2814 FMOVQCC(0);
2815 break;
2816 case 0x041: /* V9 fmovscc %fcc1 */
2817 FMOVSCC(1);
2818 break;
2819 case 0x042: /* V9 fmovdcc %fcc1 */
2820 FMOVDCC(1);
2821 break;
2822 case 0x043: /* V9 fmovqcc %fcc1 */
2823 CHECK_FPU_FEATURE(dc, FLOAT128);
2824 FMOVQCC(1);
2825 break;
2826 case 0x081: /* V9 fmovscc %fcc2 */
2827 FMOVSCC(2);
2828 break;
2829 case 0x082: /* V9 fmovdcc %fcc2 */
2830 FMOVDCC(2);
2831 break;
2832 case 0x083: /* V9 fmovqcc %fcc2 */
2833 CHECK_FPU_FEATURE(dc, FLOAT128);
2834 FMOVQCC(2);
2835 break;
2836 case 0x0c1: /* V9 fmovscc %fcc3 */
2837 FMOVSCC(3);
2838 break;
2839 case 0x0c2: /* V9 fmovdcc %fcc3 */
2840 FMOVDCC(3);
2841 break;
2842 case 0x0c3: /* V9 fmovqcc %fcc3 */
2843 CHECK_FPU_FEATURE(dc, FLOAT128);
2844 FMOVQCC(3);
2845 break;
2846 #undef FMOVSCC
2847 #undef FMOVDCC
2848 #undef FMOVQCC
2849 #define FMOVCC(size_FDQ, icc) \
2851 TCGv r_cond; \
2852 int l1; \
2854 l1 = gen_new_label(); \
2855 r_cond = tcg_temp_new(); \
2856 cond = GET_FIELD_SP(insn, 14, 17); \
2857 gen_cond(r_cond, icc, cond); \
2858 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2859 0, l1); \
2860 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2861 (glue(size_FDQ, FPREG(rs2))); \
2862 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2863 (glue(size_FDQ, FPREG(rd))); \
2864 gen_set_label(l1); \
2865 tcg_temp_free(r_cond); \
2867 #define FMOVSCC(icc) \
2869 TCGv r_cond; \
2870 int l1; \
2872 l1 = gen_new_label(); \
2873 r_cond = tcg_temp_new(); \
2874 cond = GET_FIELD_SP(insn, 14, 17); \
2875 gen_cond(r_cond, icc, cond); \
2876 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2877 0, l1); \
2878 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2879 gen_set_label(l1); \
2880 tcg_temp_free(r_cond); \
2882 #define FMOVDCC(icc) \
2884 TCGv r_cond; \
2885 int l1; \
2887 l1 = gen_new_label(); \
2888 r_cond = tcg_temp_new(); \
2889 cond = GET_FIELD_SP(insn, 14, 17); \
2890 gen_cond(r_cond, icc, cond); \
2891 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2892 0, l1); \
2893 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2894 cpu_fpr[DFPREG(rs2)]); \
2895 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2896 cpu_fpr[DFPREG(rs2) + 1]); \
2897 gen_set_label(l1); \
2898 tcg_temp_free(r_cond); \
2900 #define FMOVQCC(icc) \
2902 TCGv r_cond; \
2903 int l1; \
2905 l1 = gen_new_label(); \
2906 r_cond = tcg_temp_new(); \
2907 cond = GET_FIELD_SP(insn, 14, 17); \
2908 gen_cond(r_cond, icc, cond); \
2909 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2910 0, l1); \
2911 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2912 cpu_fpr[QFPREG(rs2)]); \
2913 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2914 cpu_fpr[QFPREG(rs2) + 1]); \
2915 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2916 cpu_fpr[QFPREG(rs2) + 2]); \
2917 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2918 cpu_fpr[QFPREG(rs2) + 3]); \
2919 gen_set_label(l1); \
2920 tcg_temp_free(r_cond); \
2923 case 0x101: /* V9 fmovscc %icc */
2924 FMOVSCC(0);
2925 break;
2926 case 0x102: /* V9 fmovdcc %icc */
2927 FMOVDCC(0);
2928 case 0x103: /* V9 fmovqcc %icc */
2929 CHECK_FPU_FEATURE(dc, FLOAT128);
2930 FMOVQCC(0);
2931 break;
2932 case 0x181: /* V9 fmovscc %xcc */
2933 FMOVSCC(1);
2934 break;
2935 case 0x182: /* V9 fmovdcc %xcc */
2936 FMOVDCC(1);
2937 break;
2938 case 0x183: /* V9 fmovqcc %xcc */
2939 CHECK_FPU_FEATURE(dc, FLOAT128);
2940 FMOVQCC(1);
2941 break;
2942 #undef FMOVSCC
2943 #undef FMOVDCC
2944 #undef FMOVQCC
2945 #endif
2946 case 0x51: /* fcmps, V9 %fcc */
2947 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2948 break;
2949 case 0x52: /* fcmpd, V9 %fcc */
2950 gen_op_load_fpr_DT0(DFPREG(rs1));
2951 gen_op_load_fpr_DT1(DFPREG(rs2));
2952 gen_op_fcmpd(rd & 3);
2953 break;
2954 case 0x53: /* fcmpq, V9 %fcc */
2955 CHECK_FPU_FEATURE(dc, FLOAT128);
2956 gen_op_load_fpr_QT0(QFPREG(rs1));
2957 gen_op_load_fpr_QT1(QFPREG(rs2));
2958 gen_op_fcmpq(rd & 3);
2959 break;
2960 case 0x55: /* fcmpes, V9 %fcc */
2961 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2962 break;
2963 case 0x56: /* fcmped, V9 %fcc */
2964 gen_op_load_fpr_DT0(DFPREG(rs1));
2965 gen_op_load_fpr_DT1(DFPREG(rs2));
2966 gen_op_fcmped(rd & 3);
2967 break;
2968 case 0x57: /* fcmpeq, V9 %fcc */
2969 CHECK_FPU_FEATURE(dc, FLOAT128);
2970 gen_op_load_fpr_QT0(QFPREG(rs1));
2971 gen_op_load_fpr_QT1(QFPREG(rs2));
2972 gen_op_fcmpeq(rd & 3);
2973 break;
2974 default:
2975 goto illegal_insn;
2977 } else if (xop == 0x2) {
2978 // clr/mov shortcut
2980 rs1 = GET_FIELD(insn, 13, 17);
2981 if (rs1 == 0) {
2982 // or %g0, x, y -> mov T0, x; mov y, T0
2983 if (IS_IMM) { /* immediate */
2984 TCGv r_const;
2986 rs2 = GET_FIELDs(insn, 19, 31);
2987 r_const = tcg_const_tl((int)rs2);
2988 gen_movl_TN_reg(rd, r_const);
2989 tcg_temp_free(r_const);
2990 } else { /* register */
2991 rs2 = GET_FIELD(insn, 27, 31);
2992 gen_movl_reg_TN(rs2, cpu_dst);
2993 gen_movl_TN_reg(rd, cpu_dst);
2995 } else {
2996 cpu_src1 = get_src1(insn, cpu_src1);
2997 if (IS_IMM) { /* immediate */
2998 rs2 = GET_FIELDs(insn, 19, 31);
2999 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
3000 gen_movl_TN_reg(rd, cpu_dst);
3001 } else { /* register */
3002 // or x, %g0, y -> mov T1, x; mov y, T1
3003 rs2 = GET_FIELD(insn, 27, 31);
3004 if (rs2 != 0) {
3005 gen_movl_reg_TN(rs2, cpu_src2);
3006 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3007 gen_movl_TN_reg(rd, cpu_dst);
3008 } else
3009 gen_movl_TN_reg(rd, cpu_src1);
3012 #ifdef TARGET_SPARC64
3013 } else if (xop == 0x25) { /* sll, V9 sllx */
3014 cpu_src1 = get_src1(insn, cpu_src1);
3015 if (IS_IMM) { /* immediate */
3016 rs2 = GET_FIELDs(insn, 20, 31);
3017 if (insn & (1 << 12)) {
3018 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3019 } else {
3020 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
3022 } else { /* register */
3023 rs2 = GET_FIELD(insn, 27, 31);
3024 gen_movl_reg_TN(rs2, cpu_src2);
3025 if (insn & (1 << 12)) {
3026 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3027 } else {
3028 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3030 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3032 gen_movl_TN_reg(rd, cpu_dst);
3033 } else if (xop == 0x26) { /* srl, V9 srlx */
3034 cpu_src1 = get_src1(insn, cpu_src1);
3035 if (IS_IMM) { /* immediate */
3036 rs2 = GET_FIELDs(insn, 20, 31);
3037 if (insn & (1 << 12)) {
3038 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3039 } else {
3040 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3041 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3043 } else { /* register */
3044 rs2 = GET_FIELD(insn, 27, 31);
3045 gen_movl_reg_TN(rs2, cpu_src2);
3046 if (insn & (1 << 12)) {
3047 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3048 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3049 } else {
3050 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3051 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3052 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3055 gen_movl_TN_reg(rd, cpu_dst);
3056 } else if (xop == 0x27) { /* sra, V9 srax */
3057 cpu_src1 = get_src1(insn, cpu_src1);
3058 if (IS_IMM) { /* immediate */
3059 rs2 = GET_FIELDs(insn, 20, 31);
3060 if (insn & (1 << 12)) {
3061 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3062 } else {
3063 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3064 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3065 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3067 } else { /* register */
3068 rs2 = GET_FIELD(insn, 27, 31);
3069 gen_movl_reg_TN(rs2, cpu_src2);
3070 if (insn & (1 << 12)) {
3071 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3072 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3073 } else {
3074 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3075 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3077 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3080 gen_movl_TN_reg(rd, cpu_dst);
3081 #endif
3082 } else if (xop < 0x36) {
3083 cpu_src1 = get_src1(insn, cpu_src1);
3084 cpu_src2 = get_src2(insn, cpu_src2);
3085 if (xop < 0x20) {
3086 switch (xop & ~0x10) {
3087 case 0x0:
3088 if (xop & 0x10)
3089 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3090 else
3091 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3092 break;
3093 case 0x1:
3094 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3095 if (xop & 0x10)
3096 gen_op_logic_cc(cpu_dst);
3097 break;
3098 case 0x2:
3099 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3100 if (xop & 0x10)
3101 gen_op_logic_cc(cpu_dst);
3102 break;
3103 case 0x3:
3104 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3105 if (xop & 0x10)
3106 gen_op_logic_cc(cpu_dst);
3107 break;
3108 case 0x4:
3109 if (xop & 0x10)
3110 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3111 else
3112 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3113 break;
3114 case 0x5:
3115 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3116 if (xop & 0x10)
3117 gen_op_logic_cc(cpu_dst);
3118 break;
3119 case 0x6:
3120 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3121 if (xop & 0x10)
3122 gen_op_logic_cc(cpu_dst);
3123 break;
3124 case 0x7:
3125 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3126 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3127 if (xop & 0x10)
3128 gen_op_logic_cc(cpu_dst);
3129 break;
3130 case 0x8:
3131 if (xop & 0x10)
3132 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3133 else {
3134 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3135 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3136 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3138 break;
3139 #ifdef TARGET_SPARC64
3140 case 0x9: /* V9 mulx */
3141 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3142 break;
3143 #endif
3144 case 0xa:
3145 CHECK_IU_FEATURE(dc, MUL);
3146 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3147 if (xop & 0x10)
3148 gen_op_logic_cc(cpu_dst);
3149 break;
3150 case 0xb:
3151 CHECK_IU_FEATURE(dc, MUL);
3152 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3153 if (xop & 0x10)
3154 gen_op_logic_cc(cpu_dst);
3155 break;
3156 case 0xc:
3157 if (xop & 0x10)
3158 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3159 else {
3160 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3161 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3162 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3164 break;
3165 #ifdef TARGET_SPARC64
3166 case 0xd: /* V9 udivx */
3167 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3168 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3169 gen_trap_ifdivzero_tl(cpu_cc_src2);
3170 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3171 break;
3172 #endif
3173 case 0xe:
3174 CHECK_IU_FEATURE(dc, DIV);
3175 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3176 if (xop & 0x10)
3177 gen_op_div_cc(cpu_dst);
3178 break;
3179 case 0xf:
3180 CHECK_IU_FEATURE(dc, DIV);
3181 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3182 if (xop & 0x10)
3183 gen_op_div_cc(cpu_dst);
3184 break;
3185 default:
3186 goto illegal_insn;
3188 gen_movl_TN_reg(rd, cpu_dst);
3189 } else {
3190 switch (xop) {
3191 case 0x20: /* taddcc */
3192 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3193 gen_movl_TN_reg(rd, cpu_dst);
3194 break;
3195 case 0x21: /* tsubcc */
3196 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3197 gen_movl_TN_reg(rd, cpu_dst);
3198 break;
3199 case 0x22: /* taddcctv */
3200 save_state(dc, cpu_cond);
3201 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3202 gen_movl_TN_reg(rd, cpu_dst);
3203 break;
3204 case 0x23: /* tsubcctv */
3205 save_state(dc, cpu_cond);
3206 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3207 gen_movl_TN_reg(rd, cpu_dst);
3208 break;
3209 case 0x24: /* mulscc */
3210 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3211 gen_movl_TN_reg(rd, cpu_dst);
3212 break;
3213 #ifndef TARGET_SPARC64
3214 case 0x25: /* sll */
3215 if (IS_IMM) { /* immediate */
3216 rs2 = GET_FIELDs(insn, 20, 31);
3217 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3218 } else { /* register */
3219 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3220 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3222 gen_movl_TN_reg(rd, cpu_dst);
3223 break;
3224 case 0x26: /* srl */
3225 if (IS_IMM) { /* immediate */
3226 rs2 = GET_FIELDs(insn, 20, 31);
3227 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3228 } else { /* register */
3229 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3230 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3232 gen_movl_TN_reg(rd, cpu_dst);
3233 break;
3234 case 0x27: /* sra */
3235 if (IS_IMM) { /* immediate */
3236 rs2 = GET_FIELDs(insn, 20, 31);
3237 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3238 } else { /* register */
3239 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3240 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3242 gen_movl_TN_reg(rd, cpu_dst);
3243 break;
3244 #endif
3245 case 0x30:
3247 switch(rd) {
3248 case 0: /* wry */
3249 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3250 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3251 break;
3252 #ifndef TARGET_SPARC64
3253 case 0x01 ... 0x0f: /* undefined in the
3254 SPARCv8 manual, nop
3255 on the microSPARC
3256 II */
3257 case 0x10 ... 0x1f: /* implementation-dependent
3258 in the SPARCv8
3259 manual, nop on the
3260 microSPARC II */
3261 break;
3262 #else
3263 case 0x2: /* V9 wrccr */
3264 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3265 gen_helper_wrccr(cpu_dst);
3266 break;
3267 case 0x3: /* V9 wrasi */
3268 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3269 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3270 break;
3271 case 0x6: /* V9 wrfprs */
3272 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3273 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3274 save_state(dc, cpu_cond);
3275 gen_op_next_insn();
3276 tcg_gen_exit_tb(0);
3277 dc->is_br = 1;
3278 break;
3279 case 0xf: /* V9 sir, nop if user */
3280 #if !defined(CONFIG_USER_ONLY)
3281 if (supervisor(dc))
3282 ; // XXX
3283 #endif
3284 break;
3285 case 0x13: /* Graphics Status */
3286 if (gen_trap_ifnofpu(dc, cpu_cond))
3287 goto jmp_insn;
3288 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3289 break;
3290 case 0x14: /* Softint set */
3291 if (!supervisor(dc))
3292 goto illegal_insn;
3293 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3294 gen_helper_set_softint(cpu_tmp64);
3295 break;
3296 case 0x15: /* Softint clear */
3297 if (!supervisor(dc))
3298 goto illegal_insn;
3299 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3300 gen_helper_clear_softint(cpu_tmp64);
3301 break;
3302 case 0x16: /* Softint write */
3303 if (!supervisor(dc))
3304 goto illegal_insn;
3305 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3306 gen_helper_write_softint(cpu_tmp64);
3307 break;
3308 case 0x17: /* Tick compare */
3309 #if !defined(CONFIG_USER_ONLY)
3310 if (!supervisor(dc))
3311 goto illegal_insn;
3312 #endif
3314 TCGv_ptr r_tickptr;
3316 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3317 cpu_src2);
3318 r_tickptr = tcg_temp_new_ptr();
3319 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3320 offsetof(CPUState, tick));
3321 gen_helper_tick_set_limit(r_tickptr,
3322 cpu_tick_cmpr);
3323 tcg_temp_free_ptr(r_tickptr);
3325 break;
3326 case 0x18: /* System tick */
3327 #if !defined(CONFIG_USER_ONLY)
3328 if (!supervisor(dc))
3329 goto illegal_insn;
3330 #endif
3332 TCGv_ptr r_tickptr;
3334 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3335 cpu_src2);
3336 r_tickptr = tcg_temp_new_ptr();
3337 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3338 offsetof(CPUState, stick));
3339 gen_helper_tick_set_count(r_tickptr,
3340 cpu_dst);
3341 tcg_temp_free_ptr(r_tickptr);
3343 break;
3344 case 0x19: /* System tick compare */
3345 #if !defined(CONFIG_USER_ONLY)
3346 if (!supervisor(dc))
3347 goto illegal_insn;
3348 #endif
3350 TCGv_ptr r_tickptr;
3352 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3353 cpu_src2);
3354 r_tickptr = tcg_temp_new_ptr();
3355 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3356 offsetof(CPUState, stick));
3357 gen_helper_tick_set_limit(r_tickptr,
3358 cpu_stick_cmpr);
3359 tcg_temp_free_ptr(r_tickptr);
3361 break;
3363 case 0x10: /* Performance Control */
3364 case 0x11: /* Performance Instrumentation
3365 Counter */
3366 case 0x12: /* Dispatch Control */
3367 #endif
3368 default:
3369 goto illegal_insn;
3372 break;
3373 #if !defined(CONFIG_USER_ONLY)
3374 case 0x31: /* wrpsr, V9 saved, restored */
3376 if (!supervisor(dc))
3377 goto priv_insn;
3378 #ifdef TARGET_SPARC64
3379 switch (rd) {
3380 case 0:
3381 gen_helper_saved();
3382 break;
3383 case 1:
3384 gen_helper_restored();
3385 break;
3386 case 2: /* UA2005 allclean */
3387 case 3: /* UA2005 otherw */
3388 case 4: /* UA2005 normalw */
3389 case 5: /* UA2005 invalw */
3390 // XXX
3391 default:
3392 goto illegal_insn;
3394 #else
3395 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3396 gen_helper_wrpsr(cpu_dst);
3397 save_state(dc, cpu_cond);
3398 gen_op_next_insn();
3399 tcg_gen_exit_tb(0);
3400 dc->is_br = 1;
3401 #endif
3403 break;
3404 case 0x32: /* wrwim, V9 wrpr */
3406 if (!supervisor(dc))
3407 goto priv_insn;
3408 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3409 #ifdef TARGET_SPARC64
3410 switch (rd) {
3411 case 0: // tpc
3413 TCGv_ptr r_tsptr;
3415 r_tsptr = tcg_temp_new_ptr();
3416 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3417 offsetof(CPUState, tsptr));
3418 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3419 offsetof(trap_state, tpc));
3420 tcg_temp_free_ptr(r_tsptr);
3422 break;
3423 case 1: // tnpc
3425 TCGv_ptr r_tsptr;
3427 r_tsptr = tcg_temp_new_ptr();
3428 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3429 offsetof(CPUState, tsptr));
3430 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3431 offsetof(trap_state, tnpc));
3432 tcg_temp_free_ptr(r_tsptr);
3434 break;
3435 case 2: // tstate
3437 TCGv_ptr r_tsptr;
3439 r_tsptr = tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3441 offsetof(CPUState, tsptr));
3442 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3443 offsetof(trap_state,
3444 tstate));
3445 tcg_temp_free_ptr(r_tsptr);
3447 break;
3448 case 3: // tt
3450 TCGv_ptr r_tsptr;
3452 r_tsptr = tcg_temp_new_ptr();
3453 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3454 offsetof(CPUState, tsptr));
3455 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3456 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3457 offsetof(trap_state, tt));
3458 tcg_temp_free_ptr(r_tsptr);
3460 break;
3461 case 4: // tick
3463 TCGv_ptr r_tickptr;
3465 r_tickptr = tcg_temp_new_ptr();
3466 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3467 offsetof(CPUState, tick));
3468 gen_helper_tick_set_count(r_tickptr,
3469 cpu_tmp0);
3470 tcg_temp_free_ptr(r_tickptr);
3472 break;
3473 case 5: // tba
3474 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3475 break;
3476 case 6: // pstate
3477 save_state(dc, cpu_cond);
3478 gen_helper_wrpstate(cpu_tmp0);
3479 gen_op_next_insn();
3480 tcg_gen_exit_tb(0);
3481 dc->is_br = 1;
3482 break;
3483 case 7: // tl
3484 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3485 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3486 offsetof(CPUSPARCState, tl));
3487 break;
3488 case 8: // pil
3489 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3490 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3491 offsetof(CPUSPARCState,
3492 psrpil));
3493 break;
3494 case 9: // cwp
3495 gen_helper_wrcwp(cpu_tmp0);
3496 break;
3497 case 10: // cansave
3498 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3499 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3500 offsetof(CPUSPARCState,
3501 cansave));
3502 break;
3503 case 11: // canrestore
3504 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3505 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3506 offsetof(CPUSPARCState,
3507 canrestore));
3508 break;
3509 case 12: // cleanwin
3510 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3511 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3512 offsetof(CPUSPARCState,
3513 cleanwin));
3514 break;
3515 case 13: // otherwin
3516 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3517 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3518 offsetof(CPUSPARCState,
3519 otherwin));
3520 break;
3521 case 14: // wstate
3522 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3523 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3524 offsetof(CPUSPARCState,
3525 wstate));
3526 break;
3527 case 16: // UA2005 gl
3528 CHECK_IU_FEATURE(dc, GL);
3529 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3530 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3531 offsetof(CPUSPARCState, gl));
3532 break;
3533 case 26: // UA2005 strand status
3534 CHECK_IU_FEATURE(dc, HYPV);
3535 if (!hypervisor(dc))
3536 goto priv_insn;
3537 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3538 break;
3539 default:
3540 goto illegal_insn;
3542 #else
3543 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3544 if (dc->def->nwindows != 32)
3545 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3546 (1 << dc->def->nwindows) - 1);
3547 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3548 #endif
3550 break;
3551 case 0x33: /* wrtbr, UA2005 wrhpr */
3553 #ifndef TARGET_SPARC64
3554 if (!supervisor(dc))
3555 goto priv_insn;
3556 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3557 #else
3558 CHECK_IU_FEATURE(dc, HYPV);
3559 if (!hypervisor(dc))
3560 goto priv_insn;
3561 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3562 switch (rd) {
3563 case 0: // hpstate
3564 // XXX gen_op_wrhpstate();
3565 save_state(dc, cpu_cond);
3566 gen_op_next_insn();
3567 tcg_gen_exit_tb(0);
3568 dc->is_br = 1;
3569 break;
3570 case 1: // htstate
3571 // XXX gen_op_wrhtstate();
3572 break;
3573 case 3: // hintp
3574 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3575 break;
3576 case 5: // htba
3577 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3578 break;
3579 case 31: // hstick_cmpr
3581 TCGv_ptr r_tickptr;
3583 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3584 r_tickptr = tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3586 offsetof(CPUState, hstick));
3587 gen_helper_tick_set_limit(r_tickptr,
3588 cpu_hstick_cmpr);
3589 tcg_temp_free_ptr(r_tickptr);
3591 break;
3592 case 6: // hver readonly
3593 default:
3594 goto illegal_insn;
3596 #endif
3598 break;
3599 #endif
3600 #ifdef TARGET_SPARC64
3601 case 0x2c: /* V9 movcc */
3603 int cc = GET_FIELD_SP(insn, 11, 12);
3604 int cond = GET_FIELD_SP(insn, 14, 17);
3605 TCGv r_cond;
3606 int l1;
3608 r_cond = tcg_temp_new();
3609 if (insn & (1 << 18)) {
3610 if (cc == 0)
3611 gen_cond(r_cond, 0, cond);
3612 else if (cc == 2)
3613 gen_cond(r_cond, 1, cond);
3614 else
3615 goto illegal_insn;
3616 } else {
3617 gen_fcond(r_cond, cc, cond);
3620 l1 = gen_new_label();
3622 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3623 if (IS_IMM) { /* immediate */
3624 TCGv r_const;
3626 rs2 = GET_FIELD_SPs(insn, 0, 10);
3627 r_const = tcg_const_tl((int)rs2);
3628 gen_movl_TN_reg(rd, r_const);
3629 tcg_temp_free(r_const);
3630 } else {
3631 rs2 = GET_FIELD_SP(insn, 0, 4);
3632 gen_movl_reg_TN(rs2, cpu_tmp0);
3633 gen_movl_TN_reg(rd, cpu_tmp0);
3635 gen_set_label(l1);
3636 tcg_temp_free(r_cond);
3637 break;
3639 case 0x2d: /* V9 sdivx */
3640 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3641 gen_movl_TN_reg(rd, cpu_dst);
3642 break;
3643 case 0x2e: /* V9 popc */
3645 cpu_src2 = get_src2(insn, cpu_src2);
3646 gen_helper_popc(cpu_dst, cpu_src2);
3647 gen_movl_TN_reg(rd, cpu_dst);
3649 case 0x2f: /* V9 movr */
3651 int cond = GET_FIELD_SP(insn, 10, 12);
3652 int l1;
3654 cpu_src1 = get_src1(insn, cpu_src1);
3656 l1 = gen_new_label();
3658 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3659 cpu_src1, 0, l1);
3660 if (IS_IMM) { /* immediate */
3661 TCGv r_const;
3663 rs2 = GET_FIELD_SPs(insn, 0, 9);
3664 r_const = tcg_const_tl((int)rs2);
3665 gen_movl_TN_reg(rd, r_const);
3666 tcg_temp_free(r_const);
3667 } else {
3668 rs2 = GET_FIELD_SP(insn, 0, 4);
3669 gen_movl_reg_TN(rs2, cpu_tmp0);
3670 gen_movl_TN_reg(rd, cpu_tmp0);
3672 gen_set_label(l1);
3673 break;
3675 #endif
3676 default:
3677 goto illegal_insn;
3680 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3681 #ifdef TARGET_SPARC64
3682 int opf = GET_FIELD_SP(insn, 5, 13);
3683 rs1 = GET_FIELD(insn, 13, 17);
3684 rs2 = GET_FIELD(insn, 27, 31);
3685 if (gen_trap_ifnofpu(dc, cpu_cond))
3686 goto jmp_insn;
3688 switch (opf) {
3689 case 0x000: /* VIS I edge8cc */
3690 case 0x001: /* VIS II edge8n */
3691 case 0x002: /* VIS I edge8lcc */
3692 case 0x003: /* VIS II edge8ln */
3693 case 0x004: /* VIS I edge16cc */
3694 case 0x005: /* VIS II edge16n */
3695 case 0x006: /* VIS I edge16lcc */
3696 case 0x007: /* VIS II edge16ln */
3697 case 0x008: /* VIS I edge32cc */
3698 case 0x009: /* VIS II edge32n */
3699 case 0x00a: /* VIS I edge32lcc */
3700 case 0x00b: /* VIS II edge32ln */
3701 // XXX
3702 goto illegal_insn;
3703 case 0x010: /* VIS I array8 */
3704 CHECK_FPU_FEATURE(dc, VIS1);
3705 cpu_src1 = get_src1(insn, cpu_src1);
3706 gen_movl_reg_TN(rs2, cpu_src2);
3707 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3708 gen_movl_TN_reg(rd, cpu_dst);
3709 break;
3710 case 0x012: /* VIS I array16 */
3711 CHECK_FPU_FEATURE(dc, VIS1);
3712 cpu_src1 = get_src1(insn, cpu_src1);
3713 gen_movl_reg_TN(rs2, cpu_src2);
3714 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3715 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3716 gen_movl_TN_reg(rd, cpu_dst);
3717 break;
3718 case 0x014: /* VIS I array32 */
3719 CHECK_FPU_FEATURE(dc, VIS1);
3720 cpu_src1 = get_src1(insn, cpu_src1);
3721 gen_movl_reg_TN(rs2, cpu_src2);
3722 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3723 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3724 gen_movl_TN_reg(rd, cpu_dst);
3725 break;
3726 case 0x018: /* VIS I alignaddr */
3727 CHECK_FPU_FEATURE(dc, VIS1);
3728 cpu_src1 = get_src1(insn, cpu_src1);
3729 gen_movl_reg_TN(rs2, cpu_src2);
3730 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3731 gen_movl_TN_reg(rd, cpu_dst);
3732 break;
3733 case 0x019: /* VIS II bmask */
3734 case 0x01a: /* VIS I alignaddrl */
3735 // XXX
3736 goto illegal_insn;
3737 case 0x020: /* VIS I fcmple16 */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 gen_helper_fcmple16();
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3743 break;
3744 case 0x022: /* VIS I fcmpne16 */
3745 CHECK_FPU_FEATURE(dc, VIS1);
3746 gen_op_load_fpr_DT0(DFPREG(rs1));
3747 gen_op_load_fpr_DT1(DFPREG(rs2));
3748 gen_helper_fcmpne16();
3749 gen_op_store_DT0_fpr(DFPREG(rd));
3750 break;
3751 case 0x024: /* VIS I fcmple32 */
3752 CHECK_FPU_FEATURE(dc, VIS1);
3753 gen_op_load_fpr_DT0(DFPREG(rs1));
3754 gen_op_load_fpr_DT1(DFPREG(rs2));
3755 gen_helper_fcmple32();
3756 gen_op_store_DT0_fpr(DFPREG(rd));
3757 break;
3758 case 0x026: /* VIS I fcmpne32 */
3759 CHECK_FPU_FEATURE(dc, VIS1);
3760 gen_op_load_fpr_DT0(DFPREG(rs1));
3761 gen_op_load_fpr_DT1(DFPREG(rs2));
3762 gen_helper_fcmpne32();
3763 gen_op_store_DT0_fpr(DFPREG(rd));
3764 break;
3765 case 0x028: /* VIS I fcmpgt16 */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_op_load_fpr_DT0(DFPREG(rs1));
3768 gen_op_load_fpr_DT1(DFPREG(rs2));
3769 gen_helper_fcmpgt16();
3770 gen_op_store_DT0_fpr(DFPREG(rd));
3771 break;
3772 case 0x02a: /* VIS I fcmpeq16 */
3773 CHECK_FPU_FEATURE(dc, VIS1);
3774 gen_op_load_fpr_DT0(DFPREG(rs1));
3775 gen_op_load_fpr_DT1(DFPREG(rs2));
3776 gen_helper_fcmpeq16();
3777 gen_op_store_DT0_fpr(DFPREG(rd));
3778 break;
3779 case 0x02c: /* VIS I fcmpgt32 */
3780 CHECK_FPU_FEATURE(dc, VIS1);
3781 gen_op_load_fpr_DT0(DFPREG(rs1));
3782 gen_op_load_fpr_DT1(DFPREG(rs2));
3783 gen_helper_fcmpgt32();
3784 gen_op_store_DT0_fpr(DFPREG(rd));
3785 break;
3786 case 0x02e: /* VIS I fcmpeq32 */
3787 CHECK_FPU_FEATURE(dc, VIS1);
3788 gen_op_load_fpr_DT0(DFPREG(rs1));
3789 gen_op_load_fpr_DT1(DFPREG(rs2));
3790 gen_helper_fcmpeq32();
3791 gen_op_store_DT0_fpr(DFPREG(rd));
3792 break;
3793 case 0x031: /* VIS I fmul8x16 */
3794 CHECK_FPU_FEATURE(dc, VIS1);
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 gen_helper_fmul8x16();
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3799 break;
3800 case 0x033: /* VIS I fmul8x16au */
3801 CHECK_FPU_FEATURE(dc, VIS1);
3802 gen_op_load_fpr_DT0(DFPREG(rs1));
3803 gen_op_load_fpr_DT1(DFPREG(rs2));
3804 gen_helper_fmul8x16au();
3805 gen_op_store_DT0_fpr(DFPREG(rd));
3806 break;
3807 case 0x035: /* VIS I fmul8x16al */
3808 CHECK_FPU_FEATURE(dc, VIS1);
3809 gen_op_load_fpr_DT0(DFPREG(rs1));
3810 gen_op_load_fpr_DT1(DFPREG(rs2));
3811 gen_helper_fmul8x16al();
3812 gen_op_store_DT0_fpr(DFPREG(rd));
3813 break;
3814 case 0x036: /* VIS I fmul8sux16 */
3815 CHECK_FPU_FEATURE(dc, VIS1);
3816 gen_op_load_fpr_DT0(DFPREG(rs1));
3817 gen_op_load_fpr_DT1(DFPREG(rs2));
3818 gen_helper_fmul8sux16();
3819 gen_op_store_DT0_fpr(DFPREG(rd));
3820 break;
3821 case 0x037: /* VIS I fmul8ulx16 */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 gen_op_load_fpr_DT0(DFPREG(rs1));
3824 gen_op_load_fpr_DT1(DFPREG(rs2));
3825 gen_helper_fmul8ulx16();
3826 gen_op_store_DT0_fpr(DFPREG(rd));
3827 break;
3828 case 0x038: /* VIS I fmuld8sux16 */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 gen_op_load_fpr_DT0(DFPREG(rs1));
3831 gen_op_load_fpr_DT1(DFPREG(rs2));
3832 gen_helper_fmuld8sux16();
3833 gen_op_store_DT0_fpr(DFPREG(rd));
3834 break;
3835 case 0x039: /* VIS I fmuld8ulx16 */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 gen_op_load_fpr_DT0(DFPREG(rs1));
3838 gen_op_load_fpr_DT1(DFPREG(rs2));
3839 gen_helper_fmuld8ulx16();
3840 gen_op_store_DT0_fpr(DFPREG(rd));
3841 break;
3842 case 0x03a: /* VIS I fpack32 */
3843 case 0x03b: /* VIS I fpack16 */
3844 case 0x03d: /* VIS I fpackfix */
3845 case 0x03e: /* VIS I pdist */
3846 // XXX
3847 goto illegal_insn;
3848 case 0x048: /* VIS I faligndata */
3849 CHECK_FPU_FEATURE(dc, VIS1);
3850 gen_op_load_fpr_DT0(DFPREG(rs1));
3851 gen_op_load_fpr_DT1(DFPREG(rs2));
3852 gen_helper_faligndata();
3853 gen_op_store_DT0_fpr(DFPREG(rd));
3854 break;
3855 case 0x04b: /* VIS I fpmerge */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 gen_op_load_fpr_DT0(DFPREG(rs1));
3858 gen_op_load_fpr_DT1(DFPREG(rs2));
3859 gen_helper_fpmerge();
3860 gen_op_store_DT0_fpr(DFPREG(rd));
3861 break;
3862 case 0x04c: /* VIS II bshuffle */
3863 // XXX
3864 goto illegal_insn;
3865 case 0x04d: /* VIS I fexpand */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 gen_op_load_fpr_DT0(DFPREG(rs1));
3868 gen_op_load_fpr_DT1(DFPREG(rs2));
3869 gen_helper_fexpand();
3870 gen_op_store_DT0_fpr(DFPREG(rd));
3871 break;
3872 case 0x050: /* VIS I fpadd16 */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 gen_op_load_fpr_DT0(DFPREG(rs1));
3875 gen_op_load_fpr_DT1(DFPREG(rs2));
3876 gen_helper_fpadd16();
3877 gen_op_store_DT0_fpr(DFPREG(rd));
3878 break;
3879 case 0x051: /* VIS I fpadd16s */
3880 CHECK_FPU_FEATURE(dc, VIS1);
3881 gen_helper_fpadd16s(cpu_fpr[rd],
3882 cpu_fpr[rs1], cpu_fpr[rs2]);
3883 break;
3884 case 0x052: /* VIS I fpadd32 */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 gen_op_load_fpr_DT0(DFPREG(rs1));
3887 gen_op_load_fpr_DT1(DFPREG(rs2));
3888 gen_helper_fpadd32();
3889 gen_op_store_DT0_fpr(DFPREG(rd));
3890 break;
3891 case 0x053: /* VIS I fpadd32s */
3892 CHECK_FPU_FEATURE(dc, VIS1);
3893 gen_helper_fpadd32s(cpu_fpr[rd],
3894 cpu_fpr[rs1], cpu_fpr[rs2]);
3895 break;
3896 case 0x054: /* VIS I fpsub16 */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_op_load_fpr_DT0(DFPREG(rs1));
3899 gen_op_load_fpr_DT1(DFPREG(rs2));
3900 gen_helper_fpsub16();
3901 gen_op_store_DT0_fpr(DFPREG(rd));
3902 break;
3903 case 0x055: /* VIS I fpsub16s */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 gen_helper_fpsub16s(cpu_fpr[rd],
3906 cpu_fpr[rs1], cpu_fpr[rs2]);
3907 break;
3908 case 0x056: /* VIS I fpsub32 */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 gen_op_load_fpr_DT0(DFPREG(rs1));
3911 gen_op_load_fpr_DT1(DFPREG(rs2));
3912 gen_helper_fpsub32();
3913 gen_op_store_DT0_fpr(DFPREG(rd));
3914 break;
3915 case 0x057: /* VIS I fpsub32s */
3916 CHECK_FPU_FEATURE(dc, VIS1);
3917 gen_helper_fpsub32s(cpu_fpr[rd],
3918 cpu_fpr[rs1], cpu_fpr[rs2]);
3919 break;
3920 case 0x060: /* VIS I fzero */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3923 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3924 break;
3925 case 0x061: /* VIS I fzeros */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3928 break;
3929 case 0x062: /* VIS I fnor */
3930 CHECK_FPU_FEATURE(dc, VIS1);
3931 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3932 cpu_fpr[DFPREG(rs2)]);
3933 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3934 cpu_fpr[DFPREG(rs2) + 1]);
3935 break;
3936 case 0x063: /* VIS I fnors */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3939 break;
3940 case 0x064: /* VIS I fandnot2 */
3941 CHECK_FPU_FEATURE(dc, VIS1);
3942 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3943 cpu_fpr[DFPREG(rs2)]);
3944 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3945 cpu_fpr[DFPREG(rs1) + 1],
3946 cpu_fpr[DFPREG(rs2) + 1]);
3947 break;
3948 case 0x065: /* VIS I fandnot2s */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3951 break;
3952 case 0x066: /* VIS I fnot2 */
3953 CHECK_FPU_FEATURE(dc, VIS1);
3954 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3955 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3956 cpu_fpr[DFPREG(rs2) + 1]);
3957 break;
3958 case 0x067: /* VIS I fnot2s */
3959 CHECK_FPU_FEATURE(dc, VIS1);
3960 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3961 break;
3962 case 0x068: /* VIS I fandnot1 */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3965 cpu_fpr[DFPREG(rs1)]);
3966 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3967 cpu_fpr[DFPREG(rs2) + 1],
3968 cpu_fpr[DFPREG(rs1) + 1]);
3969 break;
3970 case 0x069: /* VIS I fandnot1s */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3973 break;
3974 case 0x06a: /* VIS I fnot1 */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3977 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3978 cpu_fpr[DFPREG(rs1) + 1]);
3979 break;
3980 case 0x06b: /* VIS I fnot1s */
3981 CHECK_FPU_FEATURE(dc, VIS1);
3982 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3983 break;
3984 case 0x06c: /* VIS I fxor */
3985 CHECK_FPU_FEATURE(dc, VIS1);
3986 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3987 cpu_fpr[DFPREG(rs2)]);
3988 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3989 cpu_fpr[DFPREG(rs1) + 1],
3990 cpu_fpr[DFPREG(rs2) + 1]);
3991 break;
3992 case 0x06d: /* VIS I fxors */
3993 CHECK_FPU_FEATURE(dc, VIS1);
3994 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3995 break;
3996 case 0x06e: /* VIS I fnand */
3997 CHECK_FPU_FEATURE(dc, VIS1);
3998 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3999 cpu_fpr[DFPREG(rs2)]);
4000 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4001 cpu_fpr[DFPREG(rs2) + 1]);
4002 break;
4003 case 0x06f: /* VIS I fnands */
4004 CHECK_FPU_FEATURE(dc, VIS1);
4005 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4006 break;
4007 case 0x070: /* VIS I fand */
4008 CHECK_FPU_FEATURE(dc, VIS1);
4009 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4010 cpu_fpr[DFPREG(rs2)]);
4011 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4012 cpu_fpr[DFPREG(rs1) + 1],
4013 cpu_fpr[DFPREG(rs2) + 1]);
4014 break;
4015 case 0x071: /* VIS I fands */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4018 break;
4019 case 0x072: /* VIS I fxnor */
4020 CHECK_FPU_FEATURE(dc, VIS1);
4021 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4022 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4023 cpu_fpr[DFPREG(rs1)]);
4024 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4025 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4026 cpu_fpr[DFPREG(rs1) + 1]);
4027 break;
4028 case 0x073: /* VIS I fxnors */
4029 CHECK_FPU_FEATURE(dc, VIS1);
4030 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4031 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4032 break;
4033 case 0x074: /* VIS I fsrc1 */
4034 CHECK_FPU_FEATURE(dc, VIS1);
4035 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4036 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4037 cpu_fpr[DFPREG(rs1) + 1]);
4038 break;
4039 case 0x075: /* VIS I fsrc1s */
4040 CHECK_FPU_FEATURE(dc, VIS1);
4041 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4042 break;
4043 case 0x076: /* VIS I fornot2 */
4044 CHECK_FPU_FEATURE(dc, VIS1);
4045 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4046 cpu_fpr[DFPREG(rs2)]);
4047 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4048 cpu_fpr[DFPREG(rs1) + 1],
4049 cpu_fpr[DFPREG(rs2) + 1]);
4050 break;
4051 case 0x077: /* VIS I fornot2s */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4054 break;
4055 case 0x078: /* VIS I fsrc2 */
4056 CHECK_FPU_FEATURE(dc, VIS1);
4057 gen_op_load_fpr_DT0(DFPREG(rs2));
4058 gen_op_store_DT0_fpr(DFPREG(rd));
4059 break;
4060 case 0x079: /* VIS I fsrc2s */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4063 break;
4064 case 0x07a: /* VIS I fornot1 */
4065 CHECK_FPU_FEATURE(dc, VIS1);
4066 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4067 cpu_fpr[DFPREG(rs1)]);
4068 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4069 cpu_fpr[DFPREG(rs2) + 1],
4070 cpu_fpr[DFPREG(rs1) + 1]);
4071 break;
4072 case 0x07b: /* VIS I fornot1s */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4075 break;
4076 case 0x07c: /* VIS I for */
4077 CHECK_FPU_FEATURE(dc, VIS1);
4078 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4079 cpu_fpr[DFPREG(rs2)]);
4080 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4081 cpu_fpr[DFPREG(rs1) + 1],
4082 cpu_fpr[DFPREG(rs2) + 1]);
4083 break;
4084 case 0x07d: /* VIS I fors */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4087 break;
4088 case 0x07e: /* VIS I fone */
4089 CHECK_FPU_FEATURE(dc, VIS1);
4090 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4091 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4092 break;
4093 case 0x07f: /* VIS I fones */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4096 break;
4097 case 0x080: /* VIS I shutdown */
4098 case 0x081: /* VIS II siam */
4099 // XXX
4100 goto illegal_insn;
4101 default:
4102 goto illegal_insn;
4104 #else
4105 goto ncp_insn;
4106 #endif
4107 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4108 #ifdef TARGET_SPARC64
4109 goto illegal_insn;
4110 #else
4111 goto ncp_insn;
4112 #endif
4113 #ifdef TARGET_SPARC64
4114 } else if (xop == 0x39) { /* V9 return */
4115 TCGv_i32 r_const;
4117 save_state(dc, cpu_cond);
4118 cpu_src1 = get_src1(insn, cpu_src1);
4119 if (IS_IMM) { /* immediate */
4120 rs2 = GET_FIELDs(insn, 19, 31);
4121 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4122 } else { /* register */
4123 rs2 = GET_FIELD(insn, 27, 31);
4124 if (rs2) {
4125 gen_movl_reg_TN(rs2, cpu_src2);
4126 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4127 } else
4128 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4130 gen_helper_restore();
4131 gen_mov_pc_npc(dc, cpu_cond);
4132 r_const = tcg_const_i32(3);
4133 gen_helper_check_align(cpu_dst, r_const);
4134 tcg_temp_free_i32(r_const);
4135 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4136 dc->npc = DYNAMIC_PC;
4137 goto jmp_insn;
4138 #endif
4139 } else {
4140 cpu_src1 = get_src1(insn, cpu_src1);
4141 if (IS_IMM) { /* immediate */
4142 rs2 = GET_FIELDs(insn, 19, 31);
4143 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4144 } else { /* register */
4145 rs2 = GET_FIELD(insn, 27, 31);
4146 if (rs2) {
4147 gen_movl_reg_TN(rs2, cpu_src2);
4148 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4149 } else
4150 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4152 switch (xop) {
4153 case 0x38: /* jmpl */
4155 TCGv r_pc;
4156 TCGv_i32 r_const;
4158 r_pc = tcg_const_tl(dc->pc);
4159 gen_movl_TN_reg(rd, r_pc);
4160 tcg_temp_free(r_pc);
4161 gen_mov_pc_npc(dc, cpu_cond);
4162 r_const = tcg_const_i32(3);
4163 gen_helper_check_align(cpu_dst, r_const);
4164 tcg_temp_free_i32(r_const);
4165 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4166 dc->npc = DYNAMIC_PC;
4168 goto jmp_insn;
4169 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4170 case 0x39: /* rett, V9 return */
4172 TCGv_i32 r_const;
4174 if (!supervisor(dc))
4175 goto priv_insn;
4176 gen_mov_pc_npc(dc, cpu_cond);
4177 r_const = tcg_const_i32(3);
4178 gen_helper_check_align(cpu_dst, r_const);
4179 tcg_temp_free_i32(r_const);
4180 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4181 dc->npc = DYNAMIC_PC;
4182 gen_helper_rett();
4184 goto jmp_insn;
4185 #endif
4186 case 0x3b: /* flush */
4187 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4188 goto unimp_flush;
4189 gen_helper_flush(cpu_dst);
4190 break;
4191 case 0x3c: /* save */
4192 save_state(dc, cpu_cond);
4193 gen_helper_save();
4194 gen_movl_TN_reg(rd, cpu_dst);
4195 break;
4196 case 0x3d: /* restore */
4197 save_state(dc, cpu_cond);
4198 gen_helper_restore();
4199 gen_movl_TN_reg(rd, cpu_dst);
4200 break;
4201 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4202 case 0x3e: /* V9 done/retry */
4204 switch (rd) {
4205 case 0:
4206 if (!supervisor(dc))
4207 goto priv_insn;
4208 dc->npc = DYNAMIC_PC;
4209 dc->pc = DYNAMIC_PC;
4210 gen_helper_done();
4211 goto jmp_insn;
4212 case 1:
4213 if (!supervisor(dc))
4214 goto priv_insn;
4215 dc->npc = DYNAMIC_PC;
4216 dc->pc = DYNAMIC_PC;
4217 gen_helper_retry();
4218 goto jmp_insn;
4219 default:
4220 goto illegal_insn;
4223 break;
4224 #endif
4225 default:
4226 goto illegal_insn;
4229 break;
4231 break;
4232 case 3: /* load/store instructions */
4234 unsigned int xop = GET_FIELD(insn, 7, 12);
4236 cpu_src1 = get_src1(insn, cpu_src1);
4237 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4238 rs2 = GET_FIELD(insn, 27, 31);
4239 gen_movl_reg_TN(rs2, cpu_src2);
4240 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4241 } else if (IS_IMM) { /* immediate */
4242 rs2 = GET_FIELDs(insn, 19, 31);
4243 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4244 } else { /* register */
4245 rs2 = GET_FIELD(insn, 27, 31);
4246 if (rs2 != 0) {
4247 gen_movl_reg_TN(rs2, cpu_src2);
4248 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4249 } else
4250 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4252 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4253 (xop > 0x17 && xop <= 0x1d ) ||
4254 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4255 switch (xop) {
4256 case 0x0: /* load unsigned word */
4257 gen_address_mask(dc, cpu_addr);
4258 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4259 break;
4260 case 0x1: /* load unsigned byte */
4261 gen_address_mask(dc, cpu_addr);
4262 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4263 break;
4264 case 0x2: /* load unsigned halfword */
4265 gen_address_mask(dc, cpu_addr);
4266 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4267 break;
4268 case 0x3: /* load double word */
4269 if (rd & 1)
4270 goto illegal_insn;
4271 else {
4272 TCGv_i32 r_const;
4274 save_state(dc, cpu_cond);
4275 r_const = tcg_const_i32(7);
4276 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4277 tcg_temp_free_i32(r_const);
4278 gen_address_mask(dc, cpu_addr);
4279 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4280 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4281 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4282 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4283 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4284 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4285 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4287 break;
4288 case 0x9: /* load signed byte */
4289 gen_address_mask(dc, cpu_addr);
4290 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4291 break;
4292 case 0xa: /* load signed halfword */
4293 gen_address_mask(dc, cpu_addr);
4294 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4295 break;
4296 case 0xd: /* ldstub -- XXX: should be atomically */
4298 TCGv r_const;
4300 gen_address_mask(dc, cpu_addr);
4301 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4302 r_const = tcg_const_tl(0xff);
4303 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4304 tcg_temp_free(r_const);
4306 break;
4307 case 0x0f: /* swap register with memory. Also
4308 atomically */
4309 CHECK_IU_FEATURE(dc, SWAP);
4310 gen_movl_reg_TN(rd, cpu_val);
4311 gen_address_mask(dc, cpu_addr);
4312 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4313 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4314 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4315 break;
4316 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4317 case 0x10: /* load word alternate */
4318 #ifndef TARGET_SPARC64
4319 if (IS_IMM)
4320 goto illegal_insn;
4321 if (!supervisor(dc))
4322 goto priv_insn;
4323 #endif
4324 save_state(dc, cpu_cond);
4325 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4326 break;
4327 case 0x11: /* load unsigned byte alternate */
4328 #ifndef TARGET_SPARC64
4329 if (IS_IMM)
4330 goto illegal_insn;
4331 if (!supervisor(dc))
4332 goto priv_insn;
4333 #endif
4334 save_state(dc, cpu_cond);
4335 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4336 break;
4337 case 0x12: /* load unsigned halfword alternate */
4338 #ifndef TARGET_SPARC64
4339 if (IS_IMM)
4340 goto illegal_insn;
4341 if (!supervisor(dc))
4342 goto priv_insn;
4343 #endif
4344 save_state(dc, cpu_cond);
4345 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4346 break;
4347 case 0x13: /* load double word alternate */
4348 #ifndef TARGET_SPARC64
4349 if (IS_IMM)
4350 goto illegal_insn;
4351 if (!supervisor(dc))
4352 goto priv_insn;
4353 #endif
4354 if (rd & 1)
4355 goto illegal_insn;
4356 save_state(dc, cpu_cond);
4357 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4358 goto skip_move;
4359 case 0x19: /* load signed byte alternate */
4360 #ifndef TARGET_SPARC64
4361 if (IS_IMM)
4362 goto illegal_insn;
4363 if (!supervisor(dc))
4364 goto priv_insn;
4365 #endif
4366 save_state(dc, cpu_cond);
4367 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4368 break;
4369 case 0x1a: /* load signed halfword alternate */
4370 #ifndef TARGET_SPARC64
4371 if (IS_IMM)
4372 goto illegal_insn;
4373 if (!supervisor(dc))
4374 goto priv_insn;
4375 #endif
4376 save_state(dc, cpu_cond);
4377 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4378 break;
4379 case 0x1d: /* ldstuba -- XXX: should be atomically */
4380 #ifndef TARGET_SPARC64
4381 if (IS_IMM)
4382 goto illegal_insn;
4383 if (!supervisor(dc))
4384 goto priv_insn;
4385 #endif
4386 save_state(dc, cpu_cond);
4387 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4388 break;
4389 case 0x1f: /* swap reg with alt. memory. Also
4390 atomically */
4391 CHECK_IU_FEATURE(dc, SWAP);
4392 #ifndef TARGET_SPARC64
4393 if (IS_IMM)
4394 goto illegal_insn;
4395 if (!supervisor(dc))
4396 goto priv_insn;
4397 #endif
4398 save_state(dc, cpu_cond);
4399 gen_movl_reg_TN(rd, cpu_val);
4400 gen_swap_asi(cpu_val, cpu_addr, insn);
4401 break;
4403 #ifndef TARGET_SPARC64
4404 case 0x30: /* ldc */
4405 case 0x31: /* ldcsr */
4406 case 0x33: /* lddc */
4407 goto ncp_insn;
4408 #endif
4409 #endif
4410 #ifdef TARGET_SPARC64
4411 case 0x08: /* V9 ldsw */
4412 gen_address_mask(dc, cpu_addr);
4413 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4414 break;
4415 case 0x0b: /* V9 ldx */
4416 gen_address_mask(dc, cpu_addr);
4417 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4418 break;
4419 case 0x18: /* V9 ldswa */
4420 save_state(dc, cpu_cond);
4421 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4422 break;
4423 case 0x1b: /* V9 ldxa */
4424 save_state(dc, cpu_cond);
4425 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4426 break;
4427 case 0x2d: /* V9 prefetch, no effect */
4428 goto skip_move;
4429 case 0x30: /* V9 ldfa */
4430 save_state(dc, cpu_cond);
4431 gen_ldf_asi(cpu_addr, insn, 4, rd);
4432 goto skip_move;
4433 case 0x33: /* V9 lddfa */
4434 save_state(dc, cpu_cond);
4435 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4436 goto skip_move;
4437 case 0x3d: /* V9 prefetcha, no effect */
4438 goto skip_move;
4439 case 0x32: /* V9 ldqfa */
4440 CHECK_FPU_FEATURE(dc, FLOAT128);
4441 save_state(dc, cpu_cond);
4442 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4443 goto skip_move;
4444 #endif
4445 default:
4446 goto illegal_insn;
4448 gen_movl_TN_reg(rd, cpu_val);
4449 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4450 skip_move: ;
4451 #endif
4452 } else if (xop >= 0x20 && xop < 0x24) {
4453 if (gen_trap_ifnofpu(dc, cpu_cond))
4454 goto jmp_insn;
4455 save_state(dc, cpu_cond);
4456 switch (xop) {
4457 case 0x20: /* load fpreg */
4458 gen_address_mask(dc, cpu_addr);
4459 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4460 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4461 break;
4462 case 0x21: /* ldfsr, V9 ldxfsr */
4463 #ifdef TARGET_SPARC64
4464 gen_address_mask(dc, cpu_addr);
4465 if (rd == 1) {
4466 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4467 gen_helper_ldxfsr(cpu_tmp64);
4468 } else
4469 #else
4471 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4472 gen_helper_ldfsr(cpu_tmp32);
4474 #endif
4475 break;
4476 case 0x22: /* load quad fpreg */
4478 TCGv_i32 r_const;
4480 CHECK_FPU_FEATURE(dc, FLOAT128);
4481 r_const = tcg_const_i32(dc->mem_idx);
4482 gen_helper_ldqf(cpu_addr, r_const);
4483 tcg_temp_free_i32(r_const);
4484 gen_op_store_QT0_fpr(QFPREG(rd));
4486 break;
4487 case 0x23: /* load double fpreg */
4489 TCGv_i32 r_const;
4491 r_const = tcg_const_i32(dc->mem_idx);
4492 gen_helper_lddf(cpu_addr, r_const);
4493 tcg_temp_free_i32(r_const);
4494 gen_op_store_DT0_fpr(DFPREG(rd));
4496 break;
4497 default:
4498 goto illegal_insn;
4500 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4501 xop == 0xe || xop == 0x1e) {
4502 gen_movl_reg_TN(rd, cpu_val);
4503 switch (xop) {
4504 case 0x4: /* store word */
4505 gen_address_mask(dc, cpu_addr);
4506 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4507 break;
4508 case 0x5: /* store byte */
4509 gen_address_mask(dc, cpu_addr);
4510 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4511 break;
4512 case 0x6: /* store halfword */
4513 gen_address_mask(dc, cpu_addr);
4514 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4515 break;
4516 case 0x7: /* store double word */
4517 if (rd & 1)
4518 goto illegal_insn;
4519 else {
4520 TCGv_i32 r_const;
4522 save_state(dc, cpu_cond);
4523 gen_address_mask(dc, cpu_addr);
4524 r_const = tcg_const_i32(7);
4525 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4526 tcg_temp_free_i32(r_const);
4527 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4528 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4529 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4531 break;
4532 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4533 case 0x14: /* store word alternate */
4534 #ifndef TARGET_SPARC64
4535 if (IS_IMM)
4536 goto illegal_insn;
4537 if (!supervisor(dc))
4538 goto priv_insn;
4539 #endif
4540 save_state(dc, cpu_cond);
4541 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4542 break;
4543 case 0x15: /* store byte alternate */
4544 #ifndef TARGET_SPARC64
4545 if (IS_IMM)
4546 goto illegal_insn;
4547 if (!supervisor(dc))
4548 goto priv_insn;
4549 #endif
4550 save_state(dc, cpu_cond);
4551 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4552 break;
4553 case 0x16: /* store halfword alternate */
4554 #ifndef TARGET_SPARC64
4555 if (IS_IMM)
4556 goto illegal_insn;
4557 if (!supervisor(dc))
4558 goto priv_insn;
4559 #endif
4560 save_state(dc, cpu_cond);
4561 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4562 break;
4563 case 0x17: /* store double word alternate */
4564 #ifndef TARGET_SPARC64
4565 if (IS_IMM)
4566 goto illegal_insn;
4567 if (!supervisor(dc))
4568 goto priv_insn;
4569 #endif
4570 if (rd & 1)
4571 goto illegal_insn;
4572 else {
4573 save_state(dc, cpu_cond);
4574 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4576 break;
4577 #endif
4578 #ifdef TARGET_SPARC64
4579 case 0x0e: /* V9 stx */
4580 gen_address_mask(dc, cpu_addr);
4581 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4582 break;
4583 case 0x1e: /* V9 stxa */
4584 save_state(dc, cpu_cond);
4585 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4586 break;
4587 #endif
4588 default:
4589 goto illegal_insn;
4591 } else if (xop > 0x23 && xop < 0x28) {
4592 if (gen_trap_ifnofpu(dc, cpu_cond))
4593 goto jmp_insn;
4594 save_state(dc, cpu_cond);
4595 switch (xop) {
4596 case 0x24: /* store fpreg */
4597 gen_address_mask(dc, cpu_addr);
4598 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4599 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4600 break;
4601 case 0x25: /* stfsr, V9 stxfsr */
4602 #ifdef TARGET_SPARC64
4603 gen_address_mask(dc, cpu_addr);
4604 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4605 if (rd == 1)
4606 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4607 else
4608 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4609 #else
4610 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4611 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4612 #endif
4613 break;
4614 case 0x26:
4615 #ifdef TARGET_SPARC64
4616 /* V9 stqf, store quad fpreg */
4618 TCGv_i32 r_const;
4620 CHECK_FPU_FEATURE(dc, FLOAT128);
4621 gen_op_load_fpr_QT0(QFPREG(rd));
4622 r_const = tcg_const_i32(dc->mem_idx);
4623 gen_helper_stqf(cpu_addr, r_const);
4624 tcg_temp_free_i32(r_const);
4626 break;
4627 #else /* !TARGET_SPARC64 */
4628 /* stdfq, store floating point queue */
4629 #if defined(CONFIG_USER_ONLY)
4630 goto illegal_insn;
4631 #else
4632 if (!supervisor(dc))
4633 goto priv_insn;
4634 if (gen_trap_ifnofpu(dc, cpu_cond))
4635 goto jmp_insn;
4636 goto nfq_insn;
4637 #endif
4638 #endif
4639 case 0x27: /* store double fpreg */
4641 TCGv_i32 r_const;
4643 gen_op_load_fpr_DT0(DFPREG(rd));
4644 r_const = tcg_const_i32(dc->mem_idx);
4645 gen_helper_stdf(cpu_addr, r_const);
4646 tcg_temp_free_i32(r_const);
4648 break;
4649 default:
4650 goto illegal_insn;
4652 } else if (xop > 0x33 && xop < 0x3f) {
4653 save_state(dc, cpu_cond);
4654 switch (xop) {
4655 #ifdef TARGET_SPARC64
4656 case 0x34: /* V9 stfa */
4657 gen_stf_asi(cpu_addr, insn, 4, rd);
4658 break;
4659 case 0x36: /* V9 stqfa */
4661 TCGv_i32 r_const;
4663 CHECK_FPU_FEATURE(dc, FLOAT128);
4664 r_const = tcg_const_i32(7);
4665 gen_helper_check_align(cpu_addr, r_const);
4666 tcg_temp_free_i32(r_const);
4667 gen_op_load_fpr_QT0(QFPREG(rd));
4668 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4670 break;
4671 case 0x37: /* V9 stdfa */
4672 gen_op_load_fpr_DT0(DFPREG(rd));
4673 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4674 break;
4675 case 0x3c: /* V9 casa */
4676 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4677 gen_movl_TN_reg(rd, cpu_val);
4678 break;
4679 case 0x3e: /* V9 casxa */
4680 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4681 gen_movl_TN_reg(rd, cpu_val);
4682 break;
4683 #else
4684 case 0x34: /* stc */
4685 case 0x35: /* stcsr */
4686 case 0x36: /* stdcq */
4687 case 0x37: /* stdc */
4688 goto ncp_insn;
4689 #endif
4690 default:
4691 goto illegal_insn;
4694 else
4695 goto illegal_insn;
4697 break;
4699 /* default case for non jump instructions */
4700 if (dc->npc == DYNAMIC_PC) {
4701 dc->pc = DYNAMIC_PC;
4702 gen_op_next_insn();
4703 } else if (dc->npc == JUMP_PC) {
4704 /* we can do a static jump */
4705 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4706 dc->is_br = 1;
4707 } else {
4708 dc->pc = dc->npc;
4709 dc->npc = dc->npc + 4;
4711 jmp_insn:
4712 return;
4713 illegal_insn:
4715 TCGv_i32 r_const;
4717 save_state(dc, cpu_cond);
4718 r_const = tcg_const_i32(TT_ILL_INSN);
4719 gen_helper_raise_exception(r_const);
4720 tcg_temp_free_i32(r_const);
4721 dc->is_br = 1;
4723 return;
4724 unimp_flush:
4726 TCGv_i32 r_const;
4728 save_state(dc, cpu_cond);
4729 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4730 gen_helper_raise_exception(r_const);
4731 tcg_temp_free_i32(r_const);
4732 dc->is_br = 1;
4734 return;
4735 #if !defined(CONFIG_USER_ONLY)
4736 priv_insn:
4738 TCGv_i32 r_const;
4740 save_state(dc, cpu_cond);
4741 r_const = tcg_const_i32(TT_PRIV_INSN);
4742 gen_helper_raise_exception(r_const);
4743 tcg_temp_free_i32(r_const);
4744 dc->is_br = 1;
4746 return;
4747 #endif
4748 nfpu_insn:
4749 save_state(dc, cpu_cond);
4750 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4751 dc->is_br = 1;
4752 return;
4753 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4754 nfq_insn:
4755 save_state(dc, cpu_cond);
4756 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4757 dc->is_br = 1;
4758 return;
4759 #endif
4760 #ifndef TARGET_SPARC64
4761 ncp_insn:
4763 TCGv r_const;
4765 save_state(dc, cpu_cond);
4766 r_const = tcg_const_i32(TT_NCP_INSN);
4767 gen_helper_raise_exception(r_const);
4768 tcg_temp_free(r_const);
4769 dc->is_br = 1;
4771 return;
4772 #endif
/* Translate one guest translation block into TCG ops.
 *
 * tb:  the TranslationBlock being filled; tb->pc is the start PC and
 *      tb->cs_base carries the start NPC (SPARC has delayed branches, so
 *      the pair (pc, npc) — not pc alone — identifies the CPU state).
 * spc: non-zero selects "search PC" mode, which records per-instruction
 *      pc/npc/icount into the gen_opc_* arrays so that a host PC inside
 *      the generated code can later be mapped back to a guest PC
 *      (see gen_pc_load); zero is the normal translation mode.
 * env: the CPU whose mode (MMU index, FPU enable, PSTATE.AM) parameterizes
 *      the translation.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    /* Initialize the disassembly context from the TB and CPU state.  */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* npc is smuggled through cs_base (delayed-branch architecture).  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PSTATE.AM masks addresses to 32 bits on V9 CPUs.  */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by the per-insn translators; freed at
       exit_gen_loop below.  */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    /* Locals (not plain temps) because their values must survive branches
       emitted inside a single instruction's translation.  */
    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* If a breakpoint is set on this insn, emit a debug exception and
           stop translating (unless the breakpoint is on the very first
           insn, whose state is already saved).  */
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Search-PC mode: record (pc, npc, icount) for each opcode
               slot so gen_pc_load() can reconstruct guest state.  */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                /* Zero-fill slots belonging to the previous insn's ops.  */
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the shared scratch temporaries allocated above.  */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Zero-fill the remaining per-op slots past the last insn.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        /* Save both conditional-branch targets for JUMP_PC resolution
           in gen_pc_load().  */
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
/* Public entry point: translate the TB in normal mode (spc == 0).  */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
/* Public entry point: re-translate the TB in "search PC" mode (spc == 1),
   recording per-insn pc/npc so a faulting host PC can be mapped back to a
   guest PC (consumed by gen_pc_load).  */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
/* One-time translator setup: bind every CPUState field the translator
   touches to a named TCG global (so generated code can read/write it by
   handle), then register the helper functions.  Guarded by a static flag,
   so calling it once per CPU is harmless.  'env' is currently unused —
   the mappings are offsets into CPUState, identical for all CPUs.  */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env itself lives in a fixed host register (TCG_AREG0); the
           register window pointer is reloaded from memory.  */
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC V9-only state: 64-bit condition codes, ASI, FP status,
           tick comparators and hypervisor registers.  */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        /* SPARC V8-only state: the window invalid mask.  */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        /* State common to V8 and V9.  */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* g0 is hardwired to zero, so only g1..g7 get TCG globals.  */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5033 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5034 unsigned long searched_pc, int pc_pos, void *puc)
5036 target_ulong npc;
5037 env->pc = gen_opc_pc[pc_pos];
5038 npc = gen_opc_npc[pc_pos];
5039 if (npc == 1) {
5040 /* dynamic NPC: already stored */
5041 } else if (npc == 2) {
5042 target_ulong t2 = (target_ulong)(unsigned long)puc;
5043 /* jump PC: use T2 and the jump targets of the translation */
5044 if (t2)
5045 env->npc = gen_opc_jump_pc[0];
5046 else
5047 env->npc = gen_opc_jump_pc[1];
5048 } else {
5049 env->npc = npc;