Convert logical operations and umul/smul
[qemu-kvm/fedora.git] / target-sparc / translate.c
blob3b3f55006b0bb7d1667d935d585f73ac5cad1be1
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
/* Sentinel values for dc->pc / dc->npc; any other value is a real address. */
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
/* Condition-code operands recorded for (lazy) flag computation. */
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
/* Scratch TCG values reused by the per-instruction decoder. */
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
/* sparc64-only architectural state: extended ccs, ASI, FPRS, timers, etc. */
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
/* Per-translation-block decoder state, threaded through all gen_* helpers. */
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br; /* presumably: TB ended in a branch — confirm against caller */
76 int mem_idx; /* MMU index; >= 1 supervisor, == 2 hypervisor (see macros below) */
77 int fpu_enabled; /* nonzero when FPU access is permitted */
78 int address_mask_32bit; /* sparc64: truncate addresses to 32 bit (AM_CHECK) */
79 uint32_t cc_op; /* current CC operation */
80 struct TranslationBlock *tb;
81 sparc_def_t *def; /* CPU model definition for feature checks */
82 } DisasContext;
84 // This function uses non-native bit order
85 #define GET_FIELD(X, FROM, TO) \
86 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
88 // This function uses the order in the manuals, i.e. bit 0 is 2^0
89 #define GET_FIELD_SP(X, FROM, TO) \
90 GET_FIELD(X, 31 - (TO), 31 - (FROM))
/* Sign-extended variants of the field extractors. */
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
95 #ifdef TARGET_SPARC64
/* sparc64: bit 0 of a double/quad FP register number encodes bit 5. */
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
/* Masks limiting the software trap number encodings. */
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of 'x' to a full int.
 * The shift is performed on an unsigned value: left-shifting a negative
 * signed int is undefined behavior in C (C11 6.5.7), so shift unsigned
 * and rely on the (universally arithmetic) signed right shift for the
 * extension.  len == 32 degenerates to a zero-bit shift and returns x.
 */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return ((int)((unsigned int)x << shift)) >> shift;
}
112 #define IS_IMM (insn & (1<<13))
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
117 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118 offsetof(CPU_DoubleU, l.upper));
119 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120 offsetof(CPU_DoubleU, l.lower));
123 static void gen_op_load_fpr_DT1(unsigned int src)
125 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126 offsetof(CPU_DoubleU, l.upper));
127 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128 offsetof(CPU_DoubleU, l.lower));
131 static void gen_op_store_DT0_fpr(unsigned int dst)
133 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134 offsetof(CPU_DoubleU, l.upper));
135 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136 offsetof(CPU_DoubleU, l.lower));
139 static void gen_op_load_fpr_QT0(unsigned int src)
141 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142 offsetof(CPU_QuadU, l.upmost));
143 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144 offsetof(CPU_QuadU, l.upper));
145 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146 offsetof(CPU_QuadU, l.lower));
147 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148 offsetof(CPU_QuadU, l.lowest));
151 static void gen_op_load_fpr_QT1(unsigned int src)
153 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154 offsetof(CPU_QuadU, l.upmost));
155 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156 offsetof(CPU_QuadU, l.upper));
157 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158 offsetof(CPU_QuadU, l.lower));
159 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160 offsetof(CPU_QuadU, l.lowest));
163 static void gen_op_store_QT0_fpr(unsigned int dst)
165 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166 offsetof(CPU_QuadU, l.upmost));
167 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168 offsetof(CPU_QuadU, l.upper));
169 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170 offsetof(CPU_QuadU, l.lower));
171 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172 offsetof(CPU_QuadU, l.lowest));
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
/* User-mode emulation: guest code is never privileged. */
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
/* mem_idx doubles as the privilege level: >= 1 supervisor, == 2 hypervisor. */
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
/* 64-bit ABI: mask addresses only when PSTATE.AM was set at translation. */
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
/* 32-bit ABI: addresses are always truncated. */
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
199 #ifdef TARGET_SPARC64
200 if (AM_CHECK(dc))
201 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
207 if (reg == 0)
208 tcg_gen_movi_tl(tn, 0);
209 else if (reg < 8)
210 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211 else {
212 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
218 if (reg == 0)
219 return;
220 else if (reg < 8)
221 tcg_gen_mov_tl(cpu_gregs[reg], tn);
222 else {
223 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* End the TB with a jump to (pc, npc), chaining directly to the target TB
   when both addresses stay on the same guest page as this TB. */
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228 target_ulong pc, target_ulong npc)
230 TranslationBlock *tb;
232 tb = s->tb;
233 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
235 /* jump to same page: we can use a direct jump */
236 tcg_gen_goto_tb(tb_num);
237 tcg_gen_movi_tl(cpu_pc, pc);
238 tcg_gen_movi_tl(cpu_npc, npc);
/* Non-zero exit value identifies this TB + slot so the chain can be patched. */
239 tcg_gen_exit_tb((long)tb + tb_num);
240 } else {
241 /* jump to another page: currently not optimized */
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb(0);
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
251 tcg_gen_extu_i32_tl(reg, src);
252 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253 tcg_gen_andi_tl(reg, reg, 0x1);
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
258 tcg_gen_extu_i32_tl(reg, src);
259 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260 tcg_gen_andi_tl(reg, reg, 0x1);
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
265 tcg_gen_extu_i32_tl(reg, src);
266 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267 tcg_gen_andi_tl(reg, reg, 0x1);
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
272 tcg_gen_extu_i32_tl(reg, src);
273 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274 tcg_gen_andi_tl(reg, reg, 0x1);
277 static inline void gen_cc_clear_icc(void)
279 tcg_gen_movi_i32(cpu_psr, 0);
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
285 tcg_gen_movi_i32(cpu_xcc, 0);
287 #endif
289 /* old op:
290 if (!T0)
291 env->psr |= PSR_ZERO;
292 if ((int32_t) T0 < 0)
293 env->psr |= PSR_NEG;
/* Set icc N and Z from the low 32 bits of 'dst' (flags assumed pre-cleared). */
295 static inline void gen_cc_NZ_icc(TCGv dst)
297 TCGv r_temp;
298 int l1, l2;
300 l1 = gen_new_label();
301 l2 = gen_new_label();
302 r_temp = tcg_temp_new();
/* icc looks only at the low 32 bits, hence the mask / sign-extension. */
303 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
306 gen_set_label(l1);
307 tcg_gen_ext32s_tl(r_temp, dst);
308 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
310 gen_set_label(l2);
311 tcg_temp_free(r_temp);
314 #ifdef TARGET_SPARC64
/* Same, for xcc: full 64-bit value, no masking needed. */
315 static inline void gen_cc_NZ_xcc(TCGv dst)
317 int l1, l2;
319 l1 = gen_new_label();
320 l2 = gen_new_label();
321 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
323 gen_set_label(l1);
324 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
326 gen_set_label(l2);
328 #endif
330 /* old op:
331 if (T0 < src1)
332 env->psr |= PSR_CARRY;
/* Carry out of an add: result < operand (unsigned) iff the add wrapped.
   icc variant compares only the low 32 bits. */
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
336 TCGv r_temp1, r_temp2;
337 int l1;
339 l1 = gen_new_label();
340 r_temp1 = tcg_temp_new();
341 r_temp2 = tcg_temp_new();
342 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
346 gen_set_label(l1);
347 tcg_temp_free(r_temp1);
348 tcg_temp_free(r_temp2);
351 #ifdef TARGET_SPARC64
/* xcc variant: full-width unsigned comparison. */
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
354 int l1;
356 l1 = gen_new_label();
357 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
359 gen_set_label(l1);
361 #endif
363 /* old op:
364 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
365 env->psr |= PSR_OVF;
/* Signed overflow of an add: operands agree in sign but differ from the
   result, i.e. (~(src1 ^ src2) & (src1 ^ dst)) has the sign bit set.
   The bit is shifted straight into the PSR_OVF position and OR-ed in. */
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
369 TCGv r_temp;
371 r_temp = tcg_temp_new();
372 tcg_gen_xor_tl(r_temp, src1, src2);
373 tcg_gen_not_tl(r_temp, r_temp);
374 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379 tcg_temp_free(r_temp);
380 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
383 #ifdef TARGET_SPARC64
/* xcc variant: same computation on bit 63. */
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
386 TCGv r_temp;
388 r_temp = tcg_temp_new();
389 tcg_gen_xor_tl(r_temp, src1, src2);
390 tcg_gen_not_tl(r_temp, r_temp);
391 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396 tcg_temp_free(r_temp);
397 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
399 #endif
/* Tagged-add trap: raise TT_TOVF when the 32-bit signed add overflowed
   (same overflow predicate as gen_cc_V_add_icc). */
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
403 TCGv r_temp;
404 TCGv_i32 r_const;
405 int l1;
407 l1 = gen_new_label();
409 r_temp = tcg_temp_new();
410 tcg_gen_xor_tl(r_temp, src1, src2);
411 tcg_gen_not_tl(r_temp, r_temp);
412 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416 r_const = tcg_const_i32(TT_TOVF);
417 gen_helper_raise_exception(r_const);
418 tcg_temp_free_i32(r_const);
419 gen_set_label(l1);
420 tcg_temp_free(r_temp);
/* Tagged arithmetic: set icc V when either operand has nonzero tag bits
   (the low two bits). */
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
425 int l1;
427 l1 = gen_new_label();
428 tcg_gen_or_tl(cpu_tmp0, src1, src2);
429 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
432 gen_set_label(l1);
/* Flags for logical ops: record the result and set only N and Z
   (C and V are cleared by gen_cc_clear_*). */
435 static inline void gen_op_logic_cc(TCGv dst)
437 tcg_gen_mov_tl(cpu_cc_dst, dst);
439 gen_cc_clear_icc();
440 gen_cc_NZ_icc(cpu_cc_dst);
441 #ifdef TARGET_SPARC64
442 gen_cc_clear_xcc();
443 gen_cc_NZ_xcc(cpu_cc_dst);
444 #endif
/* Tagged arithmetic with trap: raise TT_TOVF when either operand has
   nonzero tag bits (low two bits). */
447 static inline void gen_tag_tv(TCGv src1, TCGv src2)
449 int l1;
450 TCGv_i32 r_const;
452 l1 = gen_new_label();
453 tcg_gen_or_tl(cpu_tmp0, src1, src2);
454 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
455 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
456 r_const = tcg_const_i32(TT_TOVF);
457 gen_helper_raise_exception(r_const);
458 tcg_temp_free_i32(r_const);
459 gen_set_label(l1);
/* ADDcc with immediate: record operands/result in cpu_cc_* for later
   flag computation, then forward the sum to dst. */
462 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
464 tcg_gen_mov_tl(cpu_cc_src, src1);
465 tcg_gen_movi_tl(cpu_cc_src2, src2);
466 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
467 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* ADDcc, register form. */
470 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
472 tcg_gen_mov_tl(cpu_cc_src, src1);
473 tcg_gen_mov_tl(cpu_cc_src2, src2);
474 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
475 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* ADDXcc with immediate: add the incoming PSR carry as a third operand. */
478 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
480 tcg_gen_mov_tl(cpu_cc_src, src1);
481 tcg_gen_movi_tl(cpu_cc_src2, src2);
482 gen_mov_reg_C(cpu_tmp0, cpu_psr);
483 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
484 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
485 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* ADDXcc, register form. */
488 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
490 tcg_gen_mov_tl(cpu_cc_src, src1);
491 tcg_gen_mov_tl(cpu_cc_src2, src2);
492 gen_mov_reg_C(cpu_tmp0, cpu_psr);
493 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
494 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
495 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* TADDcc: tagged add; computes all icc/xcc flags eagerly, including the
   tag-overflow contribution to V. */
498 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
500 tcg_gen_mov_tl(cpu_cc_src, src1);
501 tcg_gen_mov_tl(cpu_cc_src2, src2);
502 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
503 gen_cc_clear_icc();
504 gen_cc_NZ_icc(cpu_cc_dst);
505 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
506 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
507 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
508 #ifdef TARGET_SPARC64
509 gen_cc_clear_xcc();
510 gen_cc_NZ_xcc(cpu_cc_dst);
511 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
512 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
513 #endif
514 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* TADDccTV: like TADDcc but traps (instead of setting icc V) on tag or
   signed overflow; the trap checks run before the flags are written. */
517 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
519 tcg_gen_mov_tl(cpu_cc_src, src1);
520 tcg_gen_mov_tl(cpu_cc_src2, src2);
521 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
522 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
523 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
524 gen_cc_clear_icc();
525 gen_cc_NZ_icc(cpu_cc_dst);
526 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
527 #ifdef TARGET_SPARC64
528 gen_cc_clear_xcc();
529 gen_cc_NZ_xcc(cpu_cc_dst);
530 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
531 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
532 #endif
533 tcg_gen_mov_tl(dst, cpu_cc_dst);
536 /* old op:
537 if (src1 < T1)
538 env->psr |= PSR_CARRY;
/* Borrow out of a subtract: carry is set when src1 < src2 (unsigned).
   icc variant compares only the low 32 bits. */
540 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
542 TCGv r_temp1, r_temp2;
543 int l1;
545 l1 = gen_new_label();
546 r_temp1 = tcg_temp_new();
547 r_temp2 = tcg_temp_new();
548 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
549 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
550 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
551 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
552 gen_set_label(l1);
553 tcg_temp_free(r_temp1);
554 tcg_temp_free(r_temp2);
557 #ifdef TARGET_SPARC64
/* xcc variant: full-width unsigned comparison. */
558 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
560 int l1;
562 l1 = gen_new_label();
563 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
564 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
565 gen_set_label(l1);
567 #endif
569 /* old op:
570 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
571 env->psr |= PSR_OVF;
/* Signed overflow of a subtract: operands differ in sign and the result's
   sign differs from src1, i.e. ((src1 ^ src2) & (src1 ^ dst)) bit 31. */
573 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
575 TCGv r_temp;
577 r_temp = tcg_temp_new();
578 tcg_gen_xor_tl(r_temp, src1, src2);
579 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
580 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
581 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
582 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
583 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
584 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
585 tcg_temp_free(r_temp);
588 #ifdef TARGET_SPARC64
/* xcc variant: same computation on bit 63. */
589 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
591 TCGv r_temp;
593 r_temp = tcg_temp_new();
594 tcg_gen_xor_tl(r_temp, src1, src2);
595 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
596 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
597 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
598 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
599 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
600 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
601 tcg_temp_free(r_temp);
603 #endif
/* Tagged-subtract trap: raise TT_TOVF when the 32-bit signed subtract
   overflowed (same predicate as gen_cc_V_sub_icc). */
605 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
607 TCGv r_temp;
608 TCGv_i32 r_const;
609 int l1;
611 l1 = gen_new_label();
613 r_temp = tcg_temp_new();
614 tcg_gen_xor_tl(r_temp, src1, src2);
615 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
616 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
617 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
618 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
619 r_const = tcg_const_i32(TT_TOVF);
620 gen_helper_raise_exception(r_const);
621 tcg_temp_free_i32(r_const);
622 gen_set_label(l1);
623 tcg_temp_free(r_temp);
/* Common flag computation for SUBcc once cpu_cc_src/src2/dst are set. */
626 static inline void gen_op_sub_cc2(TCGv dst)
628 gen_cc_clear_icc();
629 gen_cc_NZ_icc(cpu_cc_dst);
630 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
631 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
632 #ifdef TARGET_SPARC64
633 gen_cc_clear_xcc();
634 gen_cc_NZ_xcc(cpu_cc_dst);
635 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
636 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
637 #endif
638 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* SUBcc with immediate.  src2 == 0 degenerates to a move with logic-style
   flags: for x - 0 both C and V are necessarily clear, so this is exact. */
641 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
643 tcg_gen_mov_tl(cpu_cc_src, src1);
644 tcg_gen_movi_tl(cpu_cc_src2, src2);
645 if (src2 == 0) {
646 tcg_gen_mov_tl(dst, src1);
647 gen_op_logic_cc(dst);
648 } else {
649 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
650 gen_op_sub_cc2(dst);
/* SUBcc, register form. */
654 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
656 tcg_gen_mov_tl(cpu_cc_src, src1);
657 tcg_gen_mov_tl(cpu_cc_src2, src2);
658 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 gen_op_sub_cc2(dst);
/* Second half of SUBXcc flag computation.  Note: the flags were cleared
   and a first carry contribution OR-ed in by the caller, so C here can
   come from either of the two partial subtracts. */
662 static inline void gen_op_subx_cc2(TCGv dst)
664 gen_cc_NZ_icc(cpu_cc_dst);
665 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src)
666 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667 #ifdef TARGET_SPARC64
668 gen_cc_NZ_xcc(cpu_cc_dst);
669 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
670 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
671 #endif
672 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* SUBXcc with immediate: subtract the incoming PSR carry first, check the
   borrow from that step, then subtract src2 and finish in subx_cc2. */
675 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
677 tcg_gen_mov_tl(cpu_cc_src, src1);
678 tcg_gen_movi_tl(cpu_cc_src2, src2);
679 gen_mov_reg_C(cpu_tmp0, cpu_psr);
680 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
681 gen_cc_clear_icc();
682 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
683 #ifdef TARGET_SPARC64
684 gen_cc_clear_xcc();
685 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
686 #endif
687 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
688 gen_op_subx_cc2(dst);
/* SUBXcc, register form. */
691 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
693 tcg_gen_mov_tl(cpu_cc_src, src1);
694 tcg_gen_mov_tl(cpu_cc_src2, src2);
695 gen_mov_reg_C(cpu_tmp0, cpu_psr);
696 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
697 gen_cc_clear_icc();
698 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
699 #ifdef TARGET_SPARC64
700 gen_cc_clear_xcc();
701 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
702 #endif
703 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
704 gen_op_subx_cc2(dst);
/* TSUBcc: tagged subtract; computes all icc/xcc flags eagerly, including
   the tag-overflow contribution to V. */
707 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
709 tcg_gen_mov_tl(cpu_cc_src, src1);
710 tcg_gen_mov_tl(cpu_cc_src2, src2);
711 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
712 gen_cc_clear_icc();
713 gen_cc_NZ_icc(cpu_cc_dst);
714 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
715 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
716 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
717 #ifdef TARGET_SPARC64
718 gen_cc_clear_xcc();
719 gen_cc_NZ_xcc(cpu_cc_dst);
720 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
721 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
722 #endif
723 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* TSUBccTV: like TSUBcc but traps (instead of setting icc V) on tag or
   signed overflow; the trap checks run before the flags are written. */
726 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
728 tcg_gen_mov_tl(cpu_cc_src, src1);
729 tcg_gen_mov_tl(cpu_cc_src2, src2);
730 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
731 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
732 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
733 gen_cc_clear_icc();
734 gen_cc_NZ_icc(cpu_cc_dst);
735 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
736 #ifdef TARGET_SPARC64
737 gen_cc_clear_xcc();
738 gen_cc_NZ_xcc(cpu_cc_dst);
739 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
740 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
741 #endif
742 tcg_gen_mov_tl(dst, cpu_cc_dst);
/* MULScc: one step of the V8 multiply-step algorithm.  Shifts the %y
   register and the partial product right by one, conditionally adds the
   multiplicand, and sets the icc flags on the partial sum. */
745 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
747 TCGv r_temp;
748 int l1;
750 l1 = gen_new_label();
751 r_temp = tcg_temp_new();
753 /* old op:
754 if (!(env->y & 1))
755 T1 = 0;
/* Use src2 only when the low bit of %y is set. */
757 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
758 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
759 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
760 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
761 tcg_gen_movi_tl(cpu_cc_src2, 0);
762 gen_set_label(l1);
764 // b2 = T0 & 1;
765 // env->y = (b2 << 31) | (env->y >> 1);
766 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
767 tcg_gen_shli_tl(r_temp, r_temp, 31);
768 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
769 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
770 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
771 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
773 // b1 = N ^ V;
/* The bit shifted into the partial sum is the sign of the previous step. */
774 gen_mov_reg_N(cpu_tmp0, cpu_psr);
775 gen_mov_reg_V(r_temp, cpu_psr);
776 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
777 tcg_temp_free(r_temp);
779 // T0 = (b1 << 31) | (T0 >> 1);
780 // src1 = T0;
781 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
782 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
783 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
785 /* do addition and update flags */
786 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
788 gen_cc_clear_icc();
789 gen_cc_NZ_icc(cpu_cc_dst);
790 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
791 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
792 tcg_gen_mov_tl(dst, cpu_cc_dst);
795 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
797 TCGv_i64 r_temp, r_temp2;
799 r_temp = tcg_temp_new_i64();
800 r_temp2 = tcg_temp_new_i64();
802 tcg_gen_extu_tl_i64(r_temp, src2);
803 tcg_gen_extu_tl_i64(r_temp2, src1);
804 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
806 tcg_gen_shri_i64(r_temp, r_temp2, 32);
807 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
808 tcg_temp_free_i64(r_temp);
809 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
810 #ifdef TARGET_SPARC64
811 tcg_gen_mov_i64(dst, r_temp2);
812 #else
813 tcg_gen_trunc_i64_tl(dst, r_temp2);
814 #endif
815 tcg_temp_free_i64(r_temp2);
818 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
820 TCGv_i64 r_temp, r_temp2;
822 r_temp = tcg_temp_new_i64();
823 r_temp2 = tcg_temp_new_i64();
825 tcg_gen_ext_tl_i64(r_temp, src2);
826 tcg_gen_ext_tl_i64(r_temp2, src1);
827 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
829 tcg_gen_shri_i64(r_temp, r_temp2, 32);
830 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
831 tcg_temp_free_i64(r_temp);
832 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
833 #ifdef TARGET_SPARC64
834 tcg_gen_mov_i64(dst, r_temp2);
835 #else
836 tcg_gen_trunc_i64_tl(dst, r_temp2);
837 #endif
838 tcg_temp_free_i64(r_temp2);
841 #ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO when the divisor is zero. */
842 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
844 TCGv_i32 r_const;
845 int l1;
847 l1 = gen_new_label();
848 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
849 r_const = tcg_const_i32(TT_DIV_ZERO);
850 gen_helper_raise_exception(r_const);
851 tcg_temp_free_i32(r_const);
852 gen_set_label(l1);
/* SDIVX: 64-bit signed divide.  Traps on divide-by-zero and special-cases
   INT64_MIN / -1 (which would overflow the host divide) to INT64_MIN. */
855 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
857 int l1, l2;
859 l1 = gen_new_label();
860 l2 = gen_new_label();
861 tcg_gen_mov_tl(cpu_cc_src, src1);
862 tcg_gen_mov_tl(cpu_cc_src2, src2);
863 gen_trap_ifdivzero_tl(cpu_cc_src2);
864 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
865 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
866 tcg_gen_movi_i64(dst, INT64_MIN);
867 tcg_gen_br(l2);
868 gen_set_label(l1);
869 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
870 gen_set_label(l2);
872 #endif
/* Flags after UDIV/SDIV: N and Z from the result; V is set when
   cpu_cc_src2 is nonzero (presumably the overflow marker left by the
   divide helper — confirm against the helper). */
874 static inline void gen_op_div_cc(TCGv dst)
876 int l1;
878 tcg_gen_mov_tl(cpu_cc_dst, dst);
879 gen_cc_clear_icc();
880 gen_cc_NZ_icc(cpu_cc_dst);
881 l1 = gen_new_label();
882 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
883 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
884 gen_set_label(l1);
887 // 1
888 static inline void gen_op_eval_ba(TCGv dst)
890 tcg_gen_movi_tl(dst, 1);
893 // Z
894 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
896 gen_mov_reg_Z(dst, src);
899 // Z | (N ^ V)
900 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
902 gen_mov_reg_N(cpu_tmp0, src);
903 gen_mov_reg_V(dst, src);
904 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
905 gen_mov_reg_Z(cpu_tmp0, src);
906 tcg_gen_or_tl(dst, dst, cpu_tmp0);
909 // N ^ V
910 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
912 gen_mov_reg_V(cpu_tmp0, src);
913 gen_mov_reg_N(dst, src);
914 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
917 // C | Z
918 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
920 gen_mov_reg_Z(cpu_tmp0, src);
921 gen_mov_reg_C(dst, src);
922 tcg_gen_or_tl(dst, dst, cpu_tmp0);
925 // C
926 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
928 gen_mov_reg_C(dst, src);
931 // V
932 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
934 gen_mov_reg_V(dst, src);
937 // 0
938 static inline void gen_op_eval_bn(TCGv dst)
940 tcg_gen_movi_tl(dst, 0);
943 // N
944 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
946 gen_mov_reg_N(dst, src);
949 // !Z
950 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
952 gen_mov_reg_Z(dst, src);
953 tcg_gen_xori_tl(dst, dst, 0x1);
956 // !(Z | (N ^ V))
957 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
959 gen_mov_reg_N(cpu_tmp0, src);
960 gen_mov_reg_V(dst, src);
961 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
962 gen_mov_reg_Z(cpu_tmp0, src);
963 tcg_gen_or_tl(dst, dst, cpu_tmp0);
964 tcg_gen_xori_tl(dst, dst, 0x1);
967 // !(N ^ V)
968 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
970 gen_mov_reg_V(cpu_tmp0, src);
971 gen_mov_reg_N(dst, src);
972 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
973 tcg_gen_xori_tl(dst, dst, 0x1);
976 // !(C | Z)
977 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
979 gen_mov_reg_Z(cpu_tmp0, src);
980 gen_mov_reg_C(dst, src);
981 tcg_gen_or_tl(dst, dst, cpu_tmp0);
982 tcg_gen_xori_tl(dst, dst, 0x1);
985 // !C
986 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
988 gen_mov_reg_C(dst, src);
989 tcg_gen_xori_tl(dst, dst, 0x1);
992 // !N
993 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
995 gen_mov_reg_N(dst, src);
996 tcg_gen_xori_tl(dst, dst, 0x1);
999 // !V
1000 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
1002 gen_mov_reg_V(dst, src);
1003 tcg_gen_xori_tl(dst, dst, 0x1);
1007 FPSR bit field FCC1 | FCC0:
1011 3 unordered
1013 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1014 unsigned int fcc_offset)
1016 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
1017 tcg_gen_andi_tl(reg, reg, 0x1);
1020 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1021 unsigned int fcc_offset)
1023 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1024 tcg_gen_andi_tl(reg, reg, 0x1);
1027 // !0: FCC0 | FCC1
1028 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1029 unsigned int fcc_offset)
1031 gen_mov_reg_FCC0(dst, src, fcc_offset);
1032 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1033 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1036 // 1 or 2: FCC0 ^ FCC1
1037 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1038 unsigned int fcc_offset)
1040 gen_mov_reg_FCC0(dst, src, fcc_offset);
1041 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1042 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1045 // 1 or 3: FCC0
1046 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1047 unsigned int fcc_offset)
1049 gen_mov_reg_FCC0(dst, src, fcc_offset);
1052 // 1: FCC0 & !FCC1
1053 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1054 unsigned int fcc_offset)
1056 gen_mov_reg_FCC0(dst, src, fcc_offset);
1057 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1058 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1059 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1062 // 2 or 3: FCC1
1063 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1064 unsigned int fcc_offset)
1066 gen_mov_reg_FCC1(dst, src, fcc_offset);
1069 // 2: !FCC0 & FCC1
1070 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1071 unsigned int fcc_offset)
1073 gen_mov_reg_FCC0(dst, src, fcc_offset);
1074 tcg_gen_xori_tl(dst, dst, 0x1);
1075 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1076 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1079 // 3: FCC0 & FCC1
1080 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1081 unsigned int fcc_offset)
1083 gen_mov_reg_FCC0(dst, src, fcc_offset);
1084 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1085 tcg_gen_and_tl(dst, dst, cpu_tmp0);
/* FPU branch-condition evaluators: each computes a 0/1 truth value into
 * 'dst' from the FCC0/FCC1 bits of the FSR value in 'src', with
 * 'fcc_offset' selecting which fcc field (fcc0..fcc3) is examined.
 * The comment above each function lists the fcc encodings for which the
 * condition is true.  NOTE(review): these helpers clobber cpu_tmp0. */
1088 // 0: !(FCC0 | FCC1)
1089 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1090 unsigned int fcc_offset)
1092 gen_mov_reg_FCC0(dst, src, fcc_offset);
1093 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1094 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1095 tcg_gen_xori_tl(dst, dst, 0x1);
1098 // 0 or 3: !(FCC0 ^ FCC1)
1099 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1100 unsigned int fcc_offset)
1102 gen_mov_reg_FCC0(dst, src, fcc_offset);
1103 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1104 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1105 tcg_gen_xori_tl(dst, dst, 0x1);
1108 // 0 or 2: !FCC0
1109 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1110 unsigned int fcc_offset)
1112 gen_mov_reg_FCC0(dst, src, fcc_offset);
1113 tcg_gen_xori_tl(dst, dst, 0x1);
1116 // !1: !(FCC0 & !FCC1)
1117 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1118 unsigned int fcc_offset)
1120 gen_mov_reg_FCC0(dst, src, fcc_offset);
1121 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1122 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1123 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1124 tcg_gen_xori_tl(dst, dst, 0x1);
1127 // 0 or 1: !FCC1
1128 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1129 unsigned int fcc_offset)
1131 gen_mov_reg_FCC1(dst, src, fcc_offset);
1132 tcg_gen_xori_tl(dst, dst, 0x1);
1135 // !2: !(!FCC0 & FCC1)
1136 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1137 unsigned int fcc_offset)
1139 gen_mov_reg_FCC0(dst, src, fcc_offset);
1140 tcg_gen_xori_tl(dst, dst, 0x1);
1141 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1142 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1143 tcg_gen_xori_tl(dst, dst, 0x1);
1146 // !3: !(FCC0 & FCC1)
1147 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1148 unsigned int fcc_offset)
1150 gen_mov_reg_FCC0(dst, src, fcc_offset);
1151 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1152 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1153 tcg_gen_xori_tl(dst, dst, 0x1);
/* Two-way branch: if r_cond != 0 go to pc1, else to pc2 (each chains to
 * the corresponding translation block via gen_goto_tb). */
1156 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1157 target_ulong pc2, TCGv r_cond)
1159 int l1;
1161 l1 = gen_new_label();
1163 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1165 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1167 gen_set_label(l1);
1168 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Annulled branch: when taken, jump to the target with npc = pc1; when
 * not taken, the delay slot is annulled, so skip it (pc2+4, pc2+8). */
1171 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1172 target_ulong pc2, TCGv r_cond)
1174 int l1;
1176 l1 = gen_new_label();
1178 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1180 gen_goto_tb(dc, 0, pc2, pc1);
1182 gen_set_label(l1);
1183 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a dynamic npc: set cpu_npc to npc1 if r_cond != 0, else
 * npc2, without leaving the current translation block. */
1186 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1187 TCGv r_cond)
1189 int l1, l2;
1191 l1 = gen_new_label();
1192 l2 = gen_new_label();
1194 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1196 tcg_gen_movi_tl(cpu_npc, npc1);
1197 tcg_gen_br(l2);
1199 gen_set_label(l1);
1200 tcg_gen_movi_tl(cpu_npc, npc2);
1201 gen_set_label(l2);
1204 /* call this function before using the condition register as it may
1205 have been set for a jump */
1206 static inline void flush_cond(DisasContext *dc, TCGv cond)
1208 if (dc->npc == JUMP_PC) {
1209 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1210 dc->npc = DYNAMIC_PC;
/* Write the (possibly still symbolic) npc back into cpu_npc.  A pending
 * JUMP_PC is resolved through gen_generic_branch first; an already
 * dynamic npc needs no store. */
1214 static inline void save_npc(DisasContext *dc, TCGv cond)
1216 if (dc->npc == JUMP_PC) {
1217 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1218 dc->npc = DYNAMIC_PC;
1219 } else if (dc->npc != DYNAMIC_PC) {
1220 tcg_gen_movi_tl(cpu_npc, dc->npc);
/* Flush both pc and npc to the CPU state (used before helpers that may
 * raise exceptions). */
1224 static inline void save_state(DisasContext *dc, TCGv cond)
1226 tcg_gen_movi_tl(cpu_pc, dc->pc);
1227 save_npc(dc, cond);
/* pc <- npc, resolving a pending conditional (JUMP_PC) or dynamic npc
 * into a runtime copy when the value is not known at translation time. */
1230 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1232 if (dc->npc == JUMP_PC) {
1233 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1234 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1235 dc->pc = DYNAMIC_PC;
1236 } else if (dc->npc == DYNAMIC_PC) {
1237 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1238 dc->pc = DYNAMIC_PC;
1239 } else {
1240 dc->pc = dc->npc;
/* Advance to the next instruction at runtime: pc <- npc, npc += 4. */
1244 static inline void gen_op_next_insn(void)
1246 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1247 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer condition code 'cond' (SPARC Bicc/BPcc encoding,
 * 0x0..0xf) against icc or (on SPARC64, when cc != 0) xcc, leaving a
 * 0/1 value in r_dst.  Lazily-computed flags are forced into PSR form
 * first via gen_helper_compute_psr unless dc->cc_op is already
 * CC_OP_FLAGS. */
1250 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1251 DisasContext *dc)
1253 TCGv_i32 r_src;
1255 #ifdef TARGET_SPARC64
1256 if (cc)
1257 r_src = cpu_xcc;
1258 else
1259 r_src = cpu_psr;
1260 #else
1261 r_src = cpu_psr;
1262 #endif
1263 switch (dc->cc_op) {
1264 case CC_OP_FLAGS:
1265 break;
1266 default:
1267 gen_helper_compute_psr();
1268 dc->cc_op = CC_OP_FLAGS;
1269 break;
1271 switch (cond) {
1272 case 0x0:
1273 gen_op_eval_bn(r_dst);
1274 break;
1275 case 0x1:
1276 gen_op_eval_be(r_dst, r_src);
1277 break;
1278 case 0x2:
1279 gen_op_eval_ble(r_dst, r_src);
1280 break;
1281 case 0x3:
1282 gen_op_eval_bl(r_dst, r_src);
1283 break;
1284 case 0x4:
1285 gen_op_eval_bleu(r_dst, r_src);
1286 break;
1287 case 0x5:
1288 gen_op_eval_bcs(r_dst, r_src);
1289 break;
1290 case 0x6:
1291 gen_op_eval_bneg(r_dst, r_src);
1292 break;
1293 case 0x7:
1294 gen_op_eval_bvs(r_dst, r_src);
1295 break;
1296 case 0x8:
1297 gen_op_eval_ba(r_dst);
1298 break;
1299 case 0x9:
1300 gen_op_eval_bne(r_dst, r_src);
1301 break;
1302 case 0xa:
1303 gen_op_eval_bg(r_dst, r_src);
1304 break;
1305 case 0xb:
1306 gen_op_eval_bge(r_dst, r_src);
1307 break;
1308 case 0xc:
1309 gen_op_eval_bgu(r_dst, r_src);
1310 break;
1311 case 0xd:
1312 gen_op_eval_bcc(r_dst, r_src);
1313 break;
1314 case 0xe:
1315 gen_op_eval_bpos(r_dst, r_src);
1316 break;
1317 case 0xf:
1318 gen_op_eval_bvc(r_dst, r_src);
1319 break;
/* Evaluate FPU condition code 'cond' (FBfcc/FBPfcc encoding, 0x0..0xf)
 * against fcc field 'cc' (0..3) of the FSR, leaving 0/1 in r_dst.
 * 'offset' is the bit position of the selected fcc field inside
 * cpu_fsr; the 32/34/36 - 10 values pick fcc1/fcc2/fcc3 — presumably
 * matching the FSR_FCC* layout defined elsewhere (verify against
 * cpu.h). */
1323 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1325 unsigned int offset;
1327 switch (cc) {
1328 default:
1329 case 0x0:
1330 offset = 0;
1331 break;
1332 case 0x1:
1333 offset = 32 - 10;
1334 break;
1335 case 0x2:
1336 offset = 34 - 10;
1337 break;
1338 case 0x3:
1339 offset = 36 - 10;
1340 break;
1343 switch (cond) {
1344 case 0x0:
1345 gen_op_eval_bn(r_dst);
1346 break;
1347 case 0x1:
1348 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1349 break;
1350 case 0x2:
1351 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1352 break;
1353 case 0x3:
1354 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1355 break;
1356 case 0x4:
1357 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1358 break;
1359 case 0x5:
1360 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1361 break;
1362 case 0x6:
1363 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1364 break;
1365 case 0x7:
1366 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1367 break;
1368 case 0x8:
1369 gen_op_eval_ba(r_dst);
1370 break;
1371 case 0x9:
1372 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1373 break;
1374 case 0xa:
1375 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1376 break;
1377 case 0xb:
1378 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1379 break;
1380 case 0xc:
1381 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1382 break;
1383 case 0xd:
1384 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1385 break;
1386 case 0xe:
1387 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1388 break;
1389 case 0xf:
1390 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1391 break;
1395 #ifdef TARGET_SPARC64
1396 // Inverted logic
/* Table is indexed by the BPr 'cond' field; the conditions are the
 * INVERSE of the architectural ones because gen_cond_reg branches
 * around the "set to 1" when the inverted test holds. */
1397 static const int gen_tcg_cond_reg[8] = {
1399 TCG_COND_NE,
1400 TCG_COND_GT,
1401 TCG_COND_GE,
1403 TCG_COND_EQ,
1404 TCG_COND_LE,
1405 TCG_COND_LT,
/* r_dst = 1 if register r_src satisfies BPr condition 'cond' (compare
 * against zero), else 0 — implemented with the inverted-condition
 * table above. */
1408 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1410 int l1;
1412 l1 = gen_new_label();
1413 tcg_gen_movi_tl(r_dst, 0);
1414 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1415 tcg_gen_movi_tl(r_dst, 1);
1416 gen_set_label(l1);
1418 #endif
1420 /* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  cond 0x0 =
 * "never" and 0x8 = "always" are resolved at translation time (with the
 * annul bit 'a' skipping the delay slot for the never case); everything
 * else emits a runtime test, either as an annulled branch (dc->is_br)
 * or by deferring to JUMP_PC with the two possible npc values. */
1421 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1422 TCGv r_cond)
1424 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1425 target_ulong target = dc->pc + offset;
1427 if (cond == 0x0) {
1428 /* unconditional not taken */
1429 if (a) {
1430 dc->pc = dc->npc + 4;
1431 dc->npc = dc->pc + 4;
1432 } else {
1433 dc->pc = dc->npc;
1434 dc->npc = dc->pc + 4;
1436 } else if (cond == 0x8) {
1437 /* unconditional taken */
1438 if (a) {
1439 dc->pc = target;
1440 dc->npc = dc->pc + 4;
1441 } else {
1442 dc->pc = dc->npc;
1443 dc->npc = target;
1445 } else {
1446 flush_cond(dc, r_cond);
1447 gen_cond(r_cond, cc, cond, dc);
1448 if (a) {
1449 gen_branch_a(dc, target, dc->npc, r_cond);
1450 dc->is_br = 1;
1451 } else {
1452 dc->pc = dc->npc;
1453 dc->jump_pc[0] = target;
1454 dc->jump_pc[1] = dc->npc + 4;
1455 dc->npc = JUMP_PC;
1460 /* XXX: potentially incorrect if dynamic npc */
/* Translate an FPU conditional branch (FBfcc/FBPfcc).  Same structure
 * as do_branch but the runtime condition comes from gen_fcond (FSR fcc
 * field 'cc') instead of the integer condition codes. */
1461 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1462 TCGv r_cond)
1464 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1465 target_ulong target = dc->pc + offset;
1467 if (cond == 0x0) {
1468 /* unconditional not taken */
1469 if (a) {
1470 dc->pc = dc->npc + 4;
1471 dc->npc = dc->pc + 4;
1472 } else {
1473 dc->pc = dc->npc;
1474 dc->npc = dc->pc + 4;
1476 } else if (cond == 0x8) {
1477 /* unconditional taken */
1478 if (a) {
1479 dc->pc = target;
1480 dc->npc = dc->pc + 4;
1481 } else {
1482 dc->pc = dc->npc;
1483 dc->npc = target;
1485 } else {
1486 flush_cond(dc, r_cond);
1487 gen_fcond(r_cond, cc, cond);
1488 if (a) {
1489 gen_branch_a(dc, target, dc->npc, r_cond);
1490 dc->is_br = 1;
1491 } else {
1492 dc->pc = dc->npc;
1493 dc->jump_pc[0] = target;
1494 dc->jump_pc[1] = dc->npc + 4;
1495 dc->npc = JUMP_PC;
1500 #ifdef TARGET_SPARC64
1501 /* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register (BPr): condition is a comparison of
 * r_reg against zero (gen_cond_reg); unlike do_branch there are no
 * translation-time "always"/"never" shortcuts. */
1502 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1503 TCGv r_cond, TCGv r_reg)
1505 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1506 target_ulong target = dc->pc + offset;
1508 flush_cond(dc, r_cond);
1509 gen_cond_reg(r_cond, cond, r_reg);
1510 if (a) {
1511 gen_branch_a(dc, target, dc->npc, r_cond);
1512 dc->is_br = 1;
1513 } else {
1514 dc->pc = dc->npc;
1515 dc->jump_pc[0] = target;
1516 dc->jump_pc[1] = dc->npc + 4;
1517 dc->npc = JUMP_PC;
/* Float-compare dispatchers.  On SPARC64 each fccno (0..3) has its own
 * helper so the result lands in the right FSR fcc field; on 32-bit
 * SPARC there is only fcc0, so fccno is ignored.  The 'e' variants
 * (fcmpes/fcmped/fcmpeq) are the signaling compares. */
1521 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1523 switch (fccno) {
1524 case 0:
1525 gen_helper_fcmps(r_rs1, r_rs2);
1526 break;
1527 case 1:
1528 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1529 break;
1530 case 2:
1531 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1532 break;
1533 case 3:
1534 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1535 break;
1539 static inline void gen_op_fcmpd(int fccno)
1541 switch (fccno) {
1542 case 0:
1543 gen_helper_fcmpd();
1544 break;
1545 case 1:
1546 gen_helper_fcmpd_fcc1();
1547 break;
1548 case 2:
1549 gen_helper_fcmpd_fcc2();
1550 break;
1551 case 3:
1552 gen_helper_fcmpd_fcc3();
1553 break;
1557 static inline void gen_op_fcmpq(int fccno)
1559 switch (fccno) {
1560 case 0:
1561 gen_helper_fcmpq();
1562 break;
1563 case 1:
1564 gen_helper_fcmpq_fcc1();
1565 break;
1566 case 2:
1567 gen_helper_fcmpq_fcc2();
1568 break;
1569 case 3:
1570 gen_helper_fcmpq_fcc3();
1571 break;
1575 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1577 switch (fccno) {
1578 case 0:
1579 gen_helper_fcmpes(r_rs1, r_rs2);
1580 break;
1581 case 1:
1582 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1583 break;
1584 case 2:
1585 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1586 break;
1587 case 3:
1588 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1589 break;
1593 static inline void gen_op_fcmped(int fccno)
1595 switch (fccno) {
1596 case 0:
1597 gen_helper_fcmped();
1598 break;
1599 case 1:
1600 gen_helper_fcmped_fcc1();
1601 break;
1602 case 2:
1603 gen_helper_fcmped_fcc2();
1604 break;
1605 case 3:
1606 gen_helper_fcmped_fcc3();
1607 break;
1611 static inline void gen_op_fcmpeq(int fccno)
1613 switch (fccno) {
1614 case 0:
1615 gen_helper_fcmpeq();
1616 break;
1617 case 1:
1618 gen_helper_fcmpeq_fcc1();
1619 break;
1620 case 2:
1621 gen_helper_fcmpeq_fcc2();
1622 break;
1623 case 3:
1624 gen_helper_fcmpeq_fcc3();
1625 break;
1629 #else
/* 32-bit SPARC: single fcc field, fccno unused. */
1631 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1633 gen_helper_fcmps(r_rs1, r_rs2);
1636 static inline void gen_op_fcmpd(int fccno)
1638 gen_helper_fcmpd();
1641 static inline void gen_op_fcmpq(int fccno)
1643 gen_helper_fcmpq();
1646 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1648 gen_helper_fcmpes(r_rs1, r_rs2);
1651 static inline void gen_op_fcmped(int fccno)
1653 gen_helper_fcmped();
1656 static inline void gen_op_fcmpeq(int fccno)
1658 gen_helper_fcmpeq();
1660 #endif
/* Raise a TT_FP_EXCP trap with the given FTT bits set in the FSR
 * (previous FTT is cleared first via FSR_FTT_NMASK). */
1662 static inline void gen_op_fpexception_im(int fsr_flags)
1664 TCGv_i32 r_const;
1666 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1667 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1668 r_const = tcg_const_i32(TT_FP_EXCP);
1669 gen_helper_raise_exception(r_const);
1670 tcg_temp_free_i32(r_const);
/* If the FPU is disabled (system emulation only), emit a TT_NFPU_INSN
 * trap, mark the TB ended (dc->is_br) and return 1 so the caller skips
 * translating the instruction; returns 0 when the FPU is usable. */
1673 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1675 #if !defined(CONFIG_USER_ONLY)
1676 if (!dc->fpu_enabled) {
1677 TCGv_i32 r_const;
1679 save_state(dc, r_cond);
1680 r_const = tcg_const_i32(TT_NFPU_INSN);
1681 gen_helper_raise_exception(r_const);
1682 tcg_temp_free_i32(r_const);
1683 dc->is_br = 1;
1684 return 1;
1686 #endif
1687 return 0;
/* Clear the IEEE current-exception and FTT fields of the FSR. */
1690 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1692 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset accumulated softfloat exception flags before an FP operation. */
1695 static inline void gen_clear_float_exceptions(void)
1697 gen_helper_clear_float_exceptions();
1700 /* asi moves */
1701 #ifdef TARGET_SPARC64
/* Return the ASI for this insn as an i32 temp: the %asi register when
 * the immediate form is used, otherwise the 8-bit ASI field from the
 * instruction.  Caller frees the returned temp. */
1702 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1704 int asi;
1705 TCGv_i32 r_asi;
1707 if (IS_IMM) {
1708 r_asi = tcg_temp_new_i32();
1709 tcg_gen_mov_i32(r_asi, cpu_asi);
1710 } else {
1711 asi = GET_FIELD(insn, 19, 26);
1712 r_asi = tcg_const_i32(asi);
1714 return r_asi;
/* Load 'size' bytes (optionally sign-extended) from 'addr' in the insn's
 * ASI space into dst. */
1717 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1718 int sign)
1720 TCGv_i32 r_asi, r_size, r_sign;
1722 r_asi = gen_get_asi(insn, addr);
1723 r_size = tcg_const_i32(size);
1724 r_sign = tcg_const_i32(sign);
1725 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1726 tcg_temp_free_i32(r_sign);
1727 tcg_temp_free_i32(r_size);
1728 tcg_temp_free_i32(r_asi);
/* Store 'size' bytes from src to 'addr' in the insn's ASI space. */
1731 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1733 TCGv_i32 r_asi, r_size;
1735 r_asi = gen_get_asi(insn, addr);
1736 r_size = tcg_const_i32(size);
1737 gen_helper_st_asi(addr, src, r_asi, r_size);
1738 tcg_temp_free_i32(r_size);
1739 tcg_temp_free_i32(r_asi);
/* FP load/store through an ASI; the helper writes/reads FP register rd
 * directly. */
1742 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1744 TCGv_i32 r_asi, r_size, r_rd;
1746 r_asi = gen_get_asi(insn, addr);
1747 r_size = tcg_const_i32(size);
1748 r_rd = tcg_const_i32(rd);
1749 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1750 tcg_temp_free_i32(r_rd);
1751 tcg_temp_free_i32(r_size);
1752 tcg_temp_free_i32(r_asi);
1755 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1757 TCGv_i32 r_asi, r_size, r_rd;
1759 r_asi = gen_get_asi(insn, addr);
1760 r_size = tcg_const_i32(size);
1761 r_rd = tcg_const_i32(rd);
1762 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1763 tcg_temp_free_i32(r_rd);
1764 tcg_temp_free_i32(r_size);
1765 tcg_temp_free_i32(r_asi);
/* SWAP through an ASI: load old 32-bit value into cpu_tmp64, store dst,
 * then move the old value into dst. */
1768 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1770 TCGv_i32 r_asi, r_size, r_sign;
1772 r_asi = gen_get_asi(insn, addr);
1773 r_size = tcg_const_i32(4);
1774 r_sign = tcg_const_i32(0);
1775 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1776 tcg_temp_free_i32(r_sign);
1777 gen_helper_st_asi(addr, dst, r_asi, r_size);
1778 tcg_temp_free_i32(r_size);
1779 tcg_temp_free_i32(r_asi);
1780 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* LDDA: the helper writes the register pair for rd itself. */
1783 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1785 TCGv_i32 r_asi, r_rd;
1787 r_asi = gen_get_asi(insn, addr);
1788 r_rd = tcg_const_i32(rd);
1789 gen_helper_ldda_asi(addr, r_asi, r_rd);
1790 tcg_temp_free_i32(r_rd);
1791 tcg_temp_free_i32(r_asi);
/* STDA: concatenate rd+1 (low) with hi into a 64-bit value and store. */
1794 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1796 TCGv_i32 r_asi, r_size;
1798 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1799 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1800 r_asi = gen_get_asi(insn, addr);
1801 r_size = tcg_const_i32(8);
1802 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1803 tcg_temp_free_i32(r_size);
1804 tcg_temp_free_i32(r_asi);
/* CASA: compare-and-swap, compare value taken from register rd. */
1807 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1808 int rd)
1810 TCGv r_val1;
1811 TCGv_i32 r_asi;
1813 r_val1 = tcg_temp_new();
1814 gen_movl_reg_TN(rd, r_val1);
1815 r_asi = gen_get_asi(insn, addr);
1816 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1817 tcg_temp_free_i32(r_asi);
1818 tcg_temp_free(r_val1);
/* CASXA: 64-bit compare-and-swap variant. */
1821 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1822 int rd)
1824 TCGv_i32 r_asi;
1826 gen_movl_reg_TN(rd, cpu_tmp64);
1827 r_asi = gen_get_asi(insn, addr);
1828 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1829 tcg_temp_free_i32(r_asi);
1832 #elif !defined(CONFIG_USER_ONLY)
/* 32-bit SPARC system-mode ASI helpers.  The ASI is always the literal
 * instruction field; data goes through the 64-bit cpu_tmp64 and is
 * truncated to target-long width.
 * NOTE(review): these free TCGv_i32 temps with tcg_temp_free while the
 * TARGET_SPARC64 variants above use tcg_temp_free_i32 — confirm which
 * form this TCG version requires and make them consistent. */
1834 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1835 int sign)
1837 TCGv_i32 r_asi, r_size, r_sign;
1839 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1840 r_size = tcg_const_i32(size);
1841 r_sign = tcg_const_i32(sign);
1842 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1843 tcg_temp_free(r_sign);
1844 tcg_temp_free(r_size);
1845 tcg_temp_free(r_asi);
1846 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1849 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1851 TCGv_i32 r_asi, r_size;
1853 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1854 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1855 r_size = tcg_const_i32(size);
1856 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1857 tcg_temp_free(r_size);
1858 tcg_temp_free(r_asi);
/* SWAP: read old 32-bit value, write dst, return old value in dst. */
1861 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1863 TCGv_i32 r_asi, r_size, r_sign;
1864 TCGv_i64 r_val;
1866 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1867 r_size = tcg_const_i32(4);
1868 r_sign = tcg_const_i32(0);
1869 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1870 tcg_temp_free(r_sign);
1871 r_val = tcg_temp_new_i64();
1872 tcg_gen_extu_tl_i64(r_val, dst);
1873 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1874 tcg_temp_free_i64(r_val);
1875 tcg_temp_free(r_size);
1876 tcg_temp_free(r_asi);
1877 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* LDDA: one 64-bit load split across the even/odd register pair
 * (rd = high word, rd+1 = low word). */
1880 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1882 TCGv_i32 r_asi, r_size, r_sign;
1884 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1885 r_size = tcg_const_i32(8);
1886 r_sign = tcg_const_i32(0);
1887 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1888 tcg_temp_free(r_sign);
1889 tcg_temp_free(r_size);
1890 tcg_temp_free(r_asi);
1891 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1892 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1893 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1894 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1895 gen_movl_TN_reg(rd, hi);
/* STDA: concatenate rd+1 (low) with hi into 64 bits and store. */
1898 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1900 TCGv_i32 r_asi, r_size;
1902 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1903 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1904 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1905 r_size = tcg_const_i32(8);
1906 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1907 tcg_temp_free(r_size);
1908 tcg_temp_free(r_asi);
1910 #endif
1912 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUB through an ASI: load the byte at addr into dst, then store
 * 0xff to the same location.  NOTE(review): not atomic with respect to
 * other vCPUs — presumably acceptable in this single-threaded TCG
 * model. */
1915 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1915 TCGv_i64 r_val;
1916 TCGv_i32 r_asi, r_size;
1918 gen_ld_asi(dst, addr, insn, 1, 0);
1920 r_val = tcg_const_i64(0xffULL);
1921 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1922 r_size = tcg_const_i32(1);
1923 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1924 tcg_temp_free_i32(r_size);
1925 tcg_temp_free_i32(r_asi);
1926 tcg_temp_free_i64(r_val);
1928 #endif
/* Return a TCGv holding rs1: %g0 becomes a constant 0 temp (leaked, see
 * XXX), globals come straight from cpu_gregs, and windowed registers
 * are loaded through cpu_regwptr into the caller-supplied 'def'. */
1930 static inline TCGv get_src1(unsigned int insn, TCGv def)
1932 TCGv r_rs1 = def;
1933 unsigned int rs1;
1935 rs1 = GET_FIELD(insn, 13, 17);
1936 if (rs1 == 0)
1937 r_rs1 = tcg_const_tl(0); // XXX how to free?
1938 else if (rs1 < 8)
1939 r_rs1 = cpu_gregs[rs1];
1940 else
1941 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1942 return r_rs1;
/* Return a TCGv holding the second operand: a sign-extended 13-bit
 * immediate when the i bit is set, otherwise register rs2 resolved the
 * same way as in get_src1 (constant temps are leaked, see XXX). */
1945 static inline TCGv get_src2(unsigned int insn, TCGv def)
1947 TCGv r_rs2 = def;
1949 if (IS_IMM) { /* immediate */
1950 target_long simm;
1952 simm = GET_FIELDs(insn, 19, 31);
1953 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1954 } else { /* register */
1955 unsigned int rs2;
1957 rs2 = GET_FIELD(insn, 27, 31);
1958 if (rs2 == 0)
1959 r_rs2 = tcg_const_tl(0); // XXX how to free?
1960 else if (rs2 < 8)
1961 r_rs2 = cpu_gregs[rs2];
1962 else
1963 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1965 return r_rs2;
/* Feature guards used inside disas_sparc_insn: bail out to the
 * illegal_insn / nfpu_insn labels when the emulated CPU model lacks the
 * feature.  Only usable where those goto labels are in scope. */
1968 #define CHECK_IU_FEATURE(dc, FEATURE) \
1969 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1970 goto illegal_insn;
1971 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1972 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1973 goto nfpu_insn;
1975 /* before an instruction, dc->pc must be static */
1976 static void disas_sparc_insn(DisasContext * dc)
1978 unsigned int insn, opc, rs1, rs2, rd;
1979 target_long simm;
1981 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1982 tcg_gen_debug_insn_start(dc->pc);
1983 insn = ldl_code(dc->pc);
1984 opc = GET_FIELD(insn, 0, 1);
1986 rd = GET_FIELD(insn, 2, 6);
1988 cpu_src1 = tcg_temp_new(); // const
1989 cpu_src2 = tcg_temp_new(); // const
1991 switch (opc) {
1992 case 0: /* branches/sethi */
1994 unsigned int xop = GET_FIELD(insn, 7, 9);
1995 int32_t target;
1996 switch (xop) {
1997 #ifdef TARGET_SPARC64
1998 case 0x1: /* V9 BPcc */
2000 int cc;
2002 target = GET_FIELD_SP(insn, 0, 18);
2003 target = sign_extend(target, 18);
2004 target <<= 2;
2005 cc = GET_FIELD_SP(insn, 20, 21);
2006 if (cc == 0)
2007 do_branch(dc, target, insn, 0, cpu_cond);
2008 else if (cc == 2)
2009 do_branch(dc, target, insn, 1, cpu_cond);
2010 else
2011 goto illegal_insn;
2012 goto jmp_insn;
2014 case 0x3: /* V9 BPr */
2016 target = GET_FIELD_SP(insn, 0, 13) |
2017 (GET_FIELD_SP(insn, 20, 21) << 14);
2018 target = sign_extend(target, 16);
2019 target <<= 2;
2020 cpu_src1 = get_src1(insn, cpu_src1);
2021 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2022 goto jmp_insn;
2024 case 0x5: /* V9 FBPcc */
2026 int cc = GET_FIELD_SP(insn, 20, 21);
2027 if (gen_trap_ifnofpu(dc, cpu_cond))
2028 goto jmp_insn;
2029 target = GET_FIELD_SP(insn, 0, 18);
2030 target = sign_extend(target, 19);
2031 target <<= 2;
2032 do_fbranch(dc, target, insn, cc, cpu_cond);
2033 goto jmp_insn;
2035 #else
2036 case 0x7: /* CBN+x */
2038 goto ncp_insn;
2040 #endif
2041 case 0x2: /* BN+x */
2043 target = GET_FIELD(insn, 10, 31);
2044 target = sign_extend(target, 22);
2045 target <<= 2;
2046 do_branch(dc, target, insn, 0, cpu_cond);
2047 goto jmp_insn;
2049 case 0x6: /* FBN+x */
2051 if (gen_trap_ifnofpu(dc, cpu_cond))
2052 goto jmp_insn;
2053 target = GET_FIELD(insn, 10, 31);
2054 target = sign_extend(target, 22);
2055 target <<= 2;
2056 do_fbranch(dc, target, insn, 0, cpu_cond);
2057 goto jmp_insn;
2059 case 0x4: /* SETHI */
2060 if (rd) { // nop
2061 uint32_t value = GET_FIELD(insn, 10, 31);
2062 TCGv r_const;
2064 r_const = tcg_const_tl(value << 10);
2065 gen_movl_TN_reg(rd, r_const);
2066 tcg_temp_free(r_const);
2068 break;
2069 case 0x0: /* UNIMPL */
2070 default:
2071 goto illegal_insn;
2073 break;
2075 break;
2076 case 1: /*CALL*/
2078 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2079 TCGv r_const;
2081 r_const = tcg_const_tl(dc->pc);
2082 gen_movl_TN_reg(15, r_const);
2083 tcg_temp_free(r_const);
2084 target += dc->pc;
2085 gen_mov_pc_npc(dc, cpu_cond);
2086 dc->npc = target;
2088 goto jmp_insn;
2089 case 2: /* FPU & Logical Operations */
2091 unsigned int xop = GET_FIELD(insn, 7, 12);
2092 if (xop == 0x3a) { /* generate trap */
2093 int cond;
2095 cpu_src1 = get_src1(insn, cpu_src1);
2096 if (IS_IMM) {
2097 rs2 = GET_FIELD(insn, 25, 31);
2098 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2099 } else {
2100 rs2 = GET_FIELD(insn, 27, 31);
2101 if (rs2 != 0) {
2102 gen_movl_reg_TN(rs2, cpu_src2);
2103 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2104 } else
2105 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2107 cond = GET_FIELD(insn, 3, 6);
2108 if (cond == 0x8) {
2109 save_state(dc, cpu_cond);
2110 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2111 supervisor(dc))
2112 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2113 else
2114 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2115 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2116 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2117 gen_helper_raise_exception(cpu_tmp32);
2118 } else if (cond != 0) {
2119 TCGv r_cond = tcg_temp_new();
2120 int l1;
2121 #ifdef TARGET_SPARC64
2122 /* V9 icc/xcc */
2123 int cc = GET_FIELD_SP(insn, 11, 12);
2125 save_state(dc, cpu_cond);
2126 if (cc == 0)
2127 gen_cond(r_cond, 0, cond, dc);
2128 else if (cc == 2)
2129 gen_cond(r_cond, 1, cond, dc);
2130 else
2131 goto illegal_insn;
2132 #else
2133 save_state(dc, cpu_cond);
2134 gen_cond(r_cond, 0, cond, dc);
2135 #endif
2136 l1 = gen_new_label();
2137 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2139 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2140 supervisor(dc))
2141 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2142 else
2143 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2144 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2145 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2146 gen_helper_raise_exception(cpu_tmp32);
2148 gen_set_label(l1);
2149 tcg_temp_free(r_cond);
2151 gen_op_next_insn();
2152 tcg_gen_exit_tb(0);
2153 dc->is_br = 1;
2154 goto jmp_insn;
2155 } else if (xop == 0x28) {
2156 rs1 = GET_FIELD(insn, 13, 17);
2157 switch(rs1) {
2158 case 0: /* rdy */
2159 #ifndef TARGET_SPARC64
2160 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2161 manual, rdy on the microSPARC
2162 II */
2163 case 0x0f: /* stbar in the SPARCv8 manual,
2164 rdy on the microSPARC II */
2165 case 0x10 ... 0x1f: /* implementation-dependent in the
2166 SPARCv8 manual, rdy on the
2167 microSPARC II */
2168 #endif
2169 gen_movl_TN_reg(rd, cpu_y);
2170 break;
2171 #ifdef TARGET_SPARC64
2172 case 0x2: /* V9 rdccr */
2173 gen_helper_compute_psr();
2174 gen_helper_rdccr(cpu_dst);
2175 gen_movl_TN_reg(rd, cpu_dst);
2176 break;
2177 case 0x3: /* V9 rdasi */
2178 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2179 gen_movl_TN_reg(rd, cpu_dst);
2180 break;
2181 case 0x4: /* V9 rdtick */
2183 TCGv_ptr r_tickptr;
2185 r_tickptr = tcg_temp_new_ptr();
2186 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2187 offsetof(CPUState, tick));
2188 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2189 tcg_temp_free_ptr(r_tickptr);
2190 gen_movl_TN_reg(rd, cpu_dst);
2192 break;
2193 case 0x5: /* V9 rdpc */
2195 TCGv r_const;
2197 r_const = tcg_const_tl(dc->pc);
2198 gen_movl_TN_reg(rd, r_const);
2199 tcg_temp_free(r_const);
2201 break;
2202 case 0x6: /* V9 rdfprs */
2203 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2204 gen_movl_TN_reg(rd, cpu_dst);
2205 break;
2206 case 0xf: /* V9 membar */
2207 break; /* no effect */
2208 case 0x13: /* Graphics Status */
2209 if (gen_trap_ifnofpu(dc, cpu_cond))
2210 goto jmp_insn;
2211 gen_movl_TN_reg(rd, cpu_gsr);
2212 break;
2213 case 0x16: /* Softint */
2214 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2215 gen_movl_TN_reg(rd, cpu_dst);
2216 break;
2217 case 0x17: /* Tick compare */
2218 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2219 break;
2220 case 0x18: /* System tick */
2222 TCGv_ptr r_tickptr;
2224 r_tickptr = tcg_temp_new_ptr();
2225 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2226 offsetof(CPUState, stick));
2227 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2228 tcg_temp_free_ptr(r_tickptr);
2229 gen_movl_TN_reg(rd, cpu_dst);
2231 break;
2232 case 0x19: /* System tick compare */
2233 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2234 break;
2235 case 0x10: /* Performance Control */
2236 case 0x11: /* Performance Instrumentation Counter */
2237 case 0x12: /* Dispatch Control */
2238 case 0x14: /* Softint set, WO */
2239 case 0x15: /* Softint clear, WO */
2240 #endif
2241 default:
2242 goto illegal_insn;
2244 #if !defined(CONFIG_USER_ONLY)
2245 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2246 #ifndef TARGET_SPARC64
2247 if (!supervisor(dc))
2248 goto priv_insn;
2249 gen_helper_compute_psr();
2250 dc->cc_op = CC_OP_FLAGS;
2251 gen_helper_rdpsr(cpu_dst);
2252 #else
2253 CHECK_IU_FEATURE(dc, HYPV);
2254 if (!hypervisor(dc))
2255 goto priv_insn;
2256 rs1 = GET_FIELD(insn, 13, 17);
2257 switch (rs1) {
2258 case 0: // hpstate
2259 // gen_op_rdhpstate();
2260 break;
2261 case 1: // htstate
2262 // gen_op_rdhtstate();
2263 break;
2264 case 3: // hintp
2265 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2266 break;
2267 case 5: // htba
2268 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2269 break;
2270 case 6: // hver
2271 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2272 break;
2273 case 31: // hstick_cmpr
2274 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2275 break;
2276 default:
2277 goto illegal_insn;
2279 #endif
2280 gen_movl_TN_reg(rd, cpu_dst);
2281 break;
2282 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2283 if (!supervisor(dc))
2284 goto priv_insn;
2285 #ifdef TARGET_SPARC64
2286 rs1 = GET_FIELD(insn, 13, 17);
2287 switch (rs1) {
2288 case 0: // tpc
2290 TCGv_ptr r_tsptr;
2292 r_tsptr = tcg_temp_new_ptr();
2293 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2294 offsetof(CPUState, tsptr));
2295 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2296 offsetof(trap_state, tpc));
2297 tcg_temp_free_ptr(r_tsptr);
2299 break;
2300 case 1: // tnpc
2302 TCGv_ptr r_tsptr;
2304 r_tsptr = tcg_temp_new_ptr();
2305 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2306 offsetof(CPUState, tsptr));
2307 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2308 offsetof(trap_state, tnpc));
2309 tcg_temp_free_ptr(r_tsptr);
2311 break;
2312 case 2: // tstate
2314 TCGv_ptr r_tsptr;
2316 r_tsptr = tcg_temp_new_ptr();
2317 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2318 offsetof(CPUState, tsptr));
2319 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2320 offsetof(trap_state, tstate));
2321 tcg_temp_free_ptr(r_tsptr);
2323 break;
2324 case 3: // tt
2326 TCGv_ptr r_tsptr;
2328 r_tsptr = tcg_temp_new_ptr();
2329 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2330 offsetof(CPUState, tsptr));
2331 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2332 offsetof(trap_state, tt));
2333 tcg_temp_free_ptr(r_tsptr);
2334 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2336 break;
2337 case 4: // tick
2339 TCGv_ptr r_tickptr;
2341 r_tickptr = tcg_temp_new_ptr();
2342 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2343 offsetof(CPUState, tick));
2344 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2345 gen_movl_TN_reg(rd, cpu_tmp0);
2346 tcg_temp_free_ptr(r_tickptr);
2348 break;
2349 case 5: // tba
2350 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2351 break;
2352 case 6: // pstate
2353 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2354 offsetof(CPUSPARCState, pstate));
2355 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2356 break;
2357 case 7: // tl
2358 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2359 offsetof(CPUSPARCState, tl));
2360 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2361 break;
2362 case 8: // pil
2363 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2364 offsetof(CPUSPARCState, psrpil));
2365 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2366 break;
2367 case 9: // cwp
2368 gen_helper_rdcwp(cpu_tmp0);
2369 break;
2370 case 10: // cansave
2371 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2372 offsetof(CPUSPARCState, cansave));
2373 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2374 break;
2375 case 11: // canrestore
2376 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2377 offsetof(CPUSPARCState, canrestore));
2378 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2379 break;
2380 case 12: // cleanwin
2381 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2382 offsetof(CPUSPARCState, cleanwin));
2383 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2384 break;
2385 case 13: // otherwin
2386 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2387 offsetof(CPUSPARCState, otherwin));
2388 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2389 break;
2390 case 14: // wstate
2391 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2392 offsetof(CPUSPARCState, wstate));
2393 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2394 break;
2395 case 16: // UA2005 gl
2396 CHECK_IU_FEATURE(dc, GL);
2397 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2398 offsetof(CPUSPARCState, gl));
2399 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2400 break;
2401 case 26: // UA2005 strand status
2402 CHECK_IU_FEATURE(dc, HYPV);
2403 if (!hypervisor(dc))
2404 goto priv_insn;
2405 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2406 break;
2407 case 31: // ver
2408 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2409 break;
2410 case 15: // fq
2411 default:
2412 goto illegal_insn;
2414 #else
2415 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2416 #endif
2417 gen_movl_TN_reg(rd, cpu_tmp0);
2418 break;
2419 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2420 #ifdef TARGET_SPARC64
2421 save_state(dc, cpu_cond);
2422 gen_helper_flushw();
2423 #else
2424 if (!supervisor(dc))
2425 goto priv_insn;
2426 gen_movl_TN_reg(rd, cpu_tbr);
2427 #endif
2428 break;
2429 #endif
2430 } else if (xop == 0x34) { /* FPU Operations */
2431 if (gen_trap_ifnofpu(dc, cpu_cond))
2432 goto jmp_insn;
2433 gen_op_clear_ieee_excp_and_FTT();
2434 rs1 = GET_FIELD(insn, 13, 17);
2435 rs2 = GET_FIELD(insn, 27, 31);
2436 xop = GET_FIELD(insn, 18, 26);
2437 switch (xop) {
2438 case 0x1: /* fmovs */
2439 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2440 break;
2441 case 0x5: /* fnegs */
2442 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2443 break;
2444 case 0x9: /* fabss */
2445 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2446 break;
2447 case 0x29: /* fsqrts */
2448 CHECK_FPU_FEATURE(dc, FSQRT);
2449 gen_clear_float_exceptions();
2450 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2451 gen_helper_check_ieee_exceptions();
2452 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2453 break;
2454 case 0x2a: /* fsqrtd */
2455 CHECK_FPU_FEATURE(dc, FSQRT);
2456 gen_op_load_fpr_DT1(DFPREG(rs2));
2457 gen_clear_float_exceptions();
2458 gen_helper_fsqrtd();
2459 gen_helper_check_ieee_exceptions();
2460 gen_op_store_DT0_fpr(DFPREG(rd));
2461 break;
2462 case 0x2b: /* fsqrtq */
2463 CHECK_FPU_FEATURE(dc, FLOAT128);
2464 gen_op_load_fpr_QT1(QFPREG(rs2));
2465 gen_clear_float_exceptions();
2466 gen_helper_fsqrtq();
2467 gen_helper_check_ieee_exceptions();
2468 gen_op_store_QT0_fpr(QFPREG(rd));
2469 break;
2470 case 0x41: /* fadds */
2471 gen_clear_float_exceptions();
2472 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2473 gen_helper_check_ieee_exceptions();
2474 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2475 break;
2476 case 0x42: /* faddd */
2477 gen_op_load_fpr_DT0(DFPREG(rs1));
2478 gen_op_load_fpr_DT1(DFPREG(rs2));
2479 gen_clear_float_exceptions();
2480 gen_helper_faddd();
2481 gen_helper_check_ieee_exceptions();
2482 gen_op_store_DT0_fpr(DFPREG(rd));
2483 break;
2484 case 0x43: /* faddq */
2485 CHECK_FPU_FEATURE(dc, FLOAT128);
2486 gen_op_load_fpr_QT0(QFPREG(rs1));
2487 gen_op_load_fpr_QT1(QFPREG(rs2));
2488 gen_clear_float_exceptions();
2489 gen_helper_faddq();
2490 gen_helper_check_ieee_exceptions();
2491 gen_op_store_QT0_fpr(QFPREG(rd));
2492 break;
2493 case 0x45: /* fsubs */
2494 gen_clear_float_exceptions();
2495 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2496 gen_helper_check_ieee_exceptions();
2497 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2498 break;
2499 case 0x46: /* fsubd */
2500 gen_op_load_fpr_DT0(DFPREG(rs1));
2501 gen_op_load_fpr_DT1(DFPREG(rs2));
2502 gen_clear_float_exceptions();
2503 gen_helper_fsubd();
2504 gen_helper_check_ieee_exceptions();
2505 gen_op_store_DT0_fpr(DFPREG(rd));
2506 break;
2507 case 0x47: /* fsubq */
2508 CHECK_FPU_FEATURE(dc, FLOAT128);
2509 gen_op_load_fpr_QT0(QFPREG(rs1));
2510 gen_op_load_fpr_QT1(QFPREG(rs2));
2511 gen_clear_float_exceptions();
2512 gen_helper_fsubq();
2513 gen_helper_check_ieee_exceptions();
2514 gen_op_store_QT0_fpr(QFPREG(rd));
2515 break;
2516 case 0x49: /* fmuls */
2517 CHECK_FPU_FEATURE(dc, FMUL);
2518 gen_clear_float_exceptions();
2519 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2520 gen_helper_check_ieee_exceptions();
2521 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2522 break;
2523 case 0x4a: /* fmuld */
2524 CHECK_FPU_FEATURE(dc, FMUL);
2525 gen_op_load_fpr_DT0(DFPREG(rs1));
2526 gen_op_load_fpr_DT1(DFPREG(rs2));
2527 gen_clear_float_exceptions();
2528 gen_helper_fmuld();
2529 gen_helper_check_ieee_exceptions();
2530 gen_op_store_DT0_fpr(DFPREG(rd));
2531 break;
2532 case 0x4b: /* fmulq */
2533 CHECK_FPU_FEATURE(dc, FLOAT128);
2534 CHECK_FPU_FEATURE(dc, FMUL);
2535 gen_op_load_fpr_QT0(QFPREG(rs1));
2536 gen_op_load_fpr_QT1(QFPREG(rs2));
2537 gen_clear_float_exceptions();
2538 gen_helper_fmulq();
2539 gen_helper_check_ieee_exceptions();
2540 gen_op_store_QT0_fpr(QFPREG(rd));
2541 break;
2542 case 0x4d: /* fdivs */
2543 gen_clear_float_exceptions();
2544 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2547 break;
2548 case 0x4e: /* fdivd */
2549 gen_op_load_fpr_DT0(DFPREG(rs1));
2550 gen_op_load_fpr_DT1(DFPREG(rs2));
2551 gen_clear_float_exceptions();
2552 gen_helper_fdivd();
2553 gen_helper_check_ieee_exceptions();
2554 gen_op_store_DT0_fpr(DFPREG(rd));
2555 break;
2556 case 0x4f: /* fdivq */
2557 CHECK_FPU_FEATURE(dc, FLOAT128);
2558 gen_op_load_fpr_QT0(QFPREG(rs1));
2559 gen_op_load_fpr_QT1(QFPREG(rs2));
2560 gen_clear_float_exceptions();
2561 gen_helper_fdivq();
2562 gen_helper_check_ieee_exceptions();
2563 gen_op_store_QT0_fpr(QFPREG(rd));
2564 break;
2565 case 0x69: /* fsmuld */
2566 CHECK_FPU_FEATURE(dc, FSMULD);
2567 gen_clear_float_exceptions();
2568 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2569 gen_helper_check_ieee_exceptions();
2570 gen_op_store_DT0_fpr(DFPREG(rd));
2571 break;
2572 case 0x6e: /* fdmulq */
2573 CHECK_FPU_FEATURE(dc, FLOAT128);
2574 gen_op_load_fpr_DT0(DFPREG(rs1));
2575 gen_op_load_fpr_DT1(DFPREG(rs2));
2576 gen_clear_float_exceptions();
2577 gen_helper_fdmulq();
2578 gen_helper_check_ieee_exceptions();
2579 gen_op_store_QT0_fpr(QFPREG(rd));
2580 break;
2581 case 0xc4: /* fitos */
2582 gen_clear_float_exceptions();
2583 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2584 gen_helper_check_ieee_exceptions();
2585 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2586 break;
2587 case 0xc6: /* fdtos */
2588 gen_op_load_fpr_DT1(DFPREG(rs2));
2589 gen_clear_float_exceptions();
2590 gen_helper_fdtos(cpu_tmp32);
2591 gen_helper_check_ieee_exceptions();
2592 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2593 break;
2594 case 0xc7: /* fqtos */
2595 CHECK_FPU_FEATURE(dc, FLOAT128);
2596 gen_op_load_fpr_QT1(QFPREG(rs2));
2597 gen_clear_float_exceptions();
2598 gen_helper_fqtos(cpu_tmp32);
2599 gen_helper_check_ieee_exceptions();
2600 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2601 break;
2602 case 0xc8: /* fitod */
2603 gen_helper_fitod(cpu_fpr[rs2]);
2604 gen_op_store_DT0_fpr(DFPREG(rd));
2605 break;
2606 case 0xc9: /* fstod */
2607 gen_helper_fstod(cpu_fpr[rs2]);
2608 gen_op_store_DT0_fpr(DFPREG(rd));
2609 break;
2610 case 0xcb: /* fqtod */
2611 CHECK_FPU_FEATURE(dc, FLOAT128);
2612 gen_op_load_fpr_QT1(QFPREG(rs2));
2613 gen_clear_float_exceptions();
2614 gen_helper_fqtod();
2615 gen_helper_check_ieee_exceptions();
2616 gen_op_store_DT0_fpr(DFPREG(rd));
2617 break;
2618 case 0xcc: /* fitoq */
2619 CHECK_FPU_FEATURE(dc, FLOAT128);
2620 gen_helper_fitoq(cpu_fpr[rs2]);
2621 gen_op_store_QT0_fpr(QFPREG(rd));
2622 break;
2623 case 0xcd: /* fstoq */
2624 CHECK_FPU_FEATURE(dc, FLOAT128);
2625 gen_helper_fstoq(cpu_fpr[rs2]);
2626 gen_op_store_QT0_fpr(QFPREG(rd));
2627 break;
2628 case 0xce: /* fdtoq */
2629 CHECK_FPU_FEATURE(dc, FLOAT128);
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_helper_fdtoq();
2632 gen_op_store_QT0_fpr(QFPREG(rd));
2633 break;
2634 case 0xd1: /* fstoi */
2635 gen_clear_float_exceptions();
2636 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2637 gen_helper_check_ieee_exceptions();
2638 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2639 break;
2640 case 0xd2: /* fdtoi */
2641 gen_op_load_fpr_DT1(DFPREG(rs2));
2642 gen_clear_float_exceptions();
2643 gen_helper_fdtoi(cpu_tmp32);
2644 gen_helper_check_ieee_exceptions();
2645 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2646 break;
2647 case 0xd3: /* fqtoi */
2648 CHECK_FPU_FEATURE(dc, FLOAT128);
2649 gen_op_load_fpr_QT1(QFPREG(rs2));
2650 gen_clear_float_exceptions();
2651 gen_helper_fqtoi(cpu_tmp32);
2652 gen_helper_check_ieee_exceptions();
2653 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2654 break;
2655 #ifdef TARGET_SPARC64
2656 case 0x2: /* V9 fmovd */
2657 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2658 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2659 cpu_fpr[DFPREG(rs2) + 1]);
2660 break;
2661 case 0x3: /* V9 fmovq */
2662 CHECK_FPU_FEATURE(dc, FLOAT128);
2663 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2664 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2665 cpu_fpr[QFPREG(rs2) + 1]);
2666 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2667 cpu_fpr[QFPREG(rs2) + 2]);
2668 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2669 cpu_fpr[QFPREG(rs2) + 3]);
2670 break;
2671 case 0x6: /* V9 fnegd */
2672 gen_op_load_fpr_DT1(DFPREG(rs2));
2673 gen_helper_fnegd();
2674 gen_op_store_DT0_fpr(DFPREG(rd));
2675 break;
2676 case 0x7: /* V9 fnegq */
2677 CHECK_FPU_FEATURE(dc, FLOAT128);
2678 gen_op_load_fpr_QT1(QFPREG(rs2));
2679 gen_helper_fnegq();
2680 gen_op_store_QT0_fpr(QFPREG(rd));
2681 break;
2682 case 0xa: /* V9 fabsd */
2683 gen_op_load_fpr_DT1(DFPREG(rs2));
2684 gen_helper_fabsd();
2685 gen_op_store_DT0_fpr(DFPREG(rd));
2686 break;
2687 case 0xb: /* V9 fabsq */
2688 CHECK_FPU_FEATURE(dc, FLOAT128);
2689 gen_op_load_fpr_QT1(QFPREG(rs2));
2690 gen_helper_fabsq();
2691 gen_op_store_QT0_fpr(QFPREG(rd));
2692 break;
2693 case 0x81: /* V9 fstox */
2694 gen_clear_float_exceptions();
2695 gen_helper_fstox(cpu_fpr[rs2]);
2696 gen_helper_check_ieee_exceptions();
2697 gen_op_store_DT0_fpr(DFPREG(rd));
2698 break;
2699 case 0x82: /* V9 fdtox */
2700 gen_op_load_fpr_DT1(DFPREG(rs2));
2701 gen_clear_float_exceptions();
2702 gen_helper_fdtox();
2703 gen_helper_check_ieee_exceptions();
2704 gen_op_store_DT0_fpr(DFPREG(rd));
2705 break;
2706 case 0x83: /* V9 fqtox */
2707 CHECK_FPU_FEATURE(dc, FLOAT128);
2708 gen_op_load_fpr_QT1(QFPREG(rs2));
2709 gen_clear_float_exceptions();
2710 gen_helper_fqtox();
2711 gen_helper_check_ieee_exceptions();
2712 gen_op_store_DT0_fpr(DFPREG(rd));
2713 break;
2714 case 0x84: /* V9 fxtos */
2715 gen_op_load_fpr_DT1(DFPREG(rs2));
2716 gen_clear_float_exceptions();
2717 gen_helper_fxtos(cpu_tmp32);
2718 gen_helper_check_ieee_exceptions();
2719 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2720 break;
2721 case 0x88: /* V9 fxtod */
2722 gen_op_load_fpr_DT1(DFPREG(rs2));
2723 gen_clear_float_exceptions();
2724 gen_helper_fxtod();
2725 gen_helper_check_ieee_exceptions();
2726 gen_op_store_DT0_fpr(DFPREG(rd));
2727 break;
2728 case 0x8c: /* V9 fxtoq */
2729 CHECK_FPU_FEATURE(dc, FLOAT128);
2730 gen_op_load_fpr_DT1(DFPREG(rs2));
2731 gen_clear_float_exceptions();
2732 gen_helper_fxtoq();
2733 gen_helper_check_ieee_exceptions();
2734 gen_op_store_QT0_fpr(QFPREG(rd));
2735 break;
2736 #endif
2737 default:
2738 goto illegal_insn;
2740 } else if (xop == 0x35) { /* FPU Operations */
2741 #ifdef TARGET_SPARC64
2742 int cond;
2743 #endif
2744 if (gen_trap_ifnofpu(dc, cpu_cond))
2745 goto jmp_insn;
2746 gen_op_clear_ieee_excp_and_FTT();
2747 rs1 = GET_FIELD(insn, 13, 17);
2748 rs2 = GET_FIELD(insn, 27, 31);
2749 xop = GET_FIELD(insn, 18, 26);
2750 #ifdef TARGET_SPARC64
2751 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2752 int l1;
2754 l1 = gen_new_label();
2755 cond = GET_FIELD_SP(insn, 14, 17);
2756 cpu_src1 = get_src1(insn, cpu_src1);
2757 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2758 0, l1);
2759 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2760 gen_set_label(l1);
2761 break;
2762 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2763 int l1;
2765 l1 = gen_new_label();
2766 cond = GET_FIELD_SP(insn, 14, 17);
2767 cpu_src1 = get_src1(insn, cpu_src1);
2768 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2769 0, l1);
2770 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2771 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2772 gen_set_label(l1);
2773 break;
2774 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2775 int l1;
2777 CHECK_FPU_FEATURE(dc, FLOAT128);
2778 l1 = gen_new_label();
2779 cond = GET_FIELD_SP(insn, 14, 17);
2780 cpu_src1 = get_src1(insn, cpu_src1);
2781 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2782 0, l1);
2783 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2784 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2785 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2786 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2787 gen_set_label(l1);
2788 break;
2790 #endif
2791 switch (xop) {
2792 #ifdef TARGET_SPARC64
2793 #define FMOVSCC(fcc) \
2795 TCGv r_cond; \
2796 int l1; \
2798 l1 = gen_new_label(); \
2799 r_cond = tcg_temp_new(); \
2800 cond = GET_FIELD_SP(insn, 14, 17); \
2801 gen_fcond(r_cond, fcc, cond); \
2802 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2803 0, l1); \
2804 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2805 gen_set_label(l1); \
2806 tcg_temp_free(r_cond); \
2808 #define FMOVDCC(fcc) \
2810 TCGv r_cond; \
2811 int l1; \
2813 l1 = gen_new_label(); \
2814 r_cond = tcg_temp_new(); \
2815 cond = GET_FIELD_SP(insn, 14, 17); \
2816 gen_fcond(r_cond, fcc, cond); \
2817 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2818 0, l1); \
2819 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2820 cpu_fpr[DFPREG(rs2)]); \
2821 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2822 cpu_fpr[DFPREG(rs2) + 1]); \
2823 gen_set_label(l1); \
2824 tcg_temp_free(r_cond); \
2826 #define FMOVQCC(fcc) \
2828 TCGv r_cond; \
2829 int l1; \
2831 l1 = gen_new_label(); \
2832 r_cond = tcg_temp_new(); \
2833 cond = GET_FIELD_SP(insn, 14, 17); \
2834 gen_fcond(r_cond, fcc, cond); \
2835 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2836 0, l1); \
2837 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2838 cpu_fpr[QFPREG(rs2)]); \
2839 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2840 cpu_fpr[QFPREG(rs2) + 1]); \
2841 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2842 cpu_fpr[QFPREG(rs2) + 2]); \
2843 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2844 cpu_fpr[QFPREG(rs2) + 3]); \
2845 gen_set_label(l1); \
2846 tcg_temp_free(r_cond); \
2848 case 0x001: /* V9 fmovscc %fcc0 */
2849 FMOVSCC(0);
2850 break;
2851 case 0x002: /* V9 fmovdcc %fcc0 */
2852 FMOVDCC(0);
2853 break;
2854 case 0x003: /* V9 fmovqcc %fcc0 */
2855 CHECK_FPU_FEATURE(dc, FLOAT128);
2856 FMOVQCC(0);
2857 break;
2858 case 0x041: /* V9 fmovscc %fcc1 */
2859 FMOVSCC(1);
2860 break;
2861 case 0x042: /* V9 fmovdcc %fcc1 */
2862 FMOVDCC(1);
2863 break;
2864 case 0x043: /* V9 fmovqcc %fcc1 */
2865 CHECK_FPU_FEATURE(dc, FLOAT128);
2866 FMOVQCC(1);
2867 break;
2868 case 0x081: /* V9 fmovscc %fcc2 */
2869 FMOVSCC(2);
2870 break;
2871 case 0x082: /* V9 fmovdcc %fcc2 */
2872 FMOVDCC(2);
2873 break;
2874 case 0x083: /* V9 fmovqcc %fcc2 */
2875 CHECK_FPU_FEATURE(dc, FLOAT128);
2876 FMOVQCC(2);
2877 break;
2878 case 0x0c1: /* V9 fmovscc %fcc3 */
2879 FMOVSCC(3);
2880 break;
2881 case 0x0c2: /* V9 fmovdcc %fcc3 */
2882 FMOVDCC(3);
2883 break;
2884 case 0x0c3: /* V9 fmovqcc %fcc3 */
2885 CHECK_FPU_FEATURE(dc, FLOAT128);
2886 FMOVQCC(3);
2887 break;
2888 #undef FMOVSCC
2889 #undef FMOVDCC
2890 #undef FMOVQCC
2891 #define FMOVSCC(icc) \
2893 TCGv r_cond; \
2894 int l1; \
2896 l1 = gen_new_label(); \
2897 r_cond = tcg_temp_new(); \
2898 cond = GET_FIELD_SP(insn, 14, 17); \
2899 gen_cond(r_cond, icc, cond, dc); \
2900 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2901 0, l1); \
2902 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2903 gen_set_label(l1); \
2904 tcg_temp_free(r_cond); \
2906 #define FMOVDCC(icc) \
2908 TCGv r_cond; \
2909 int l1; \
2911 l1 = gen_new_label(); \
2912 r_cond = tcg_temp_new(); \
2913 cond = GET_FIELD_SP(insn, 14, 17); \
2914 gen_cond(r_cond, icc, cond, dc); \
2915 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2916 0, l1); \
2917 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2918 cpu_fpr[DFPREG(rs2)]); \
2919 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2920 cpu_fpr[DFPREG(rs2) + 1]); \
2921 gen_set_label(l1); \
2922 tcg_temp_free(r_cond); \
2924 #define FMOVQCC(icc) \
2926 TCGv r_cond; \
2927 int l1; \
2929 l1 = gen_new_label(); \
2930 r_cond = tcg_temp_new(); \
2931 cond = GET_FIELD_SP(insn, 14, 17); \
2932 gen_cond(r_cond, icc, cond, dc); \
2933 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2934 0, l1); \
2935 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2936 cpu_fpr[QFPREG(rs2)]); \
2937 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2938 cpu_fpr[QFPREG(rs2) + 1]); \
2939 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2940 cpu_fpr[QFPREG(rs2) + 2]); \
2941 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2942 cpu_fpr[QFPREG(rs2) + 3]); \
2943 gen_set_label(l1); \
2944 tcg_temp_free(r_cond); \
2947 case 0x101: /* V9 fmovscc %icc */
2948 FMOVSCC(0);
2949 break;
2950 case 0x102: /* V9 fmovdcc %icc */
2951 FMOVDCC(0);
2952 case 0x103: /* V9 fmovqcc %icc */
2953 CHECK_FPU_FEATURE(dc, FLOAT128);
2954 FMOVQCC(0);
2955 break;
2956 case 0x181: /* V9 fmovscc %xcc */
2957 FMOVSCC(1);
2958 break;
2959 case 0x182: /* V9 fmovdcc %xcc */
2960 FMOVDCC(1);
2961 break;
2962 case 0x183: /* V9 fmovqcc %xcc */
2963 CHECK_FPU_FEATURE(dc, FLOAT128);
2964 FMOVQCC(1);
2965 break;
2966 #undef FMOVSCC
2967 #undef FMOVDCC
2968 #undef FMOVQCC
2969 #endif
2970 case 0x51: /* fcmps, V9 %fcc */
2971 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2972 break;
2973 case 0x52: /* fcmpd, V9 %fcc */
2974 gen_op_load_fpr_DT0(DFPREG(rs1));
2975 gen_op_load_fpr_DT1(DFPREG(rs2));
2976 gen_op_fcmpd(rd & 3);
2977 break;
2978 case 0x53: /* fcmpq, V9 %fcc */
2979 CHECK_FPU_FEATURE(dc, FLOAT128);
2980 gen_op_load_fpr_QT0(QFPREG(rs1));
2981 gen_op_load_fpr_QT1(QFPREG(rs2));
2982 gen_op_fcmpq(rd & 3);
2983 break;
2984 case 0x55: /* fcmpes, V9 %fcc */
2985 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2986 break;
2987 case 0x56: /* fcmped, V9 %fcc */
2988 gen_op_load_fpr_DT0(DFPREG(rs1));
2989 gen_op_load_fpr_DT1(DFPREG(rs2));
2990 gen_op_fcmped(rd & 3);
2991 break;
2992 case 0x57: /* fcmpeq, V9 %fcc */
2993 CHECK_FPU_FEATURE(dc, FLOAT128);
2994 gen_op_load_fpr_QT0(QFPREG(rs1));
2995 gen_op_load_fpr_QT1(QFPREG(rs2));
2996 gen_op_fcmpeq(rd & 3);
2997 break;
2998 default:
2999 goto illegal_insn;
3001 } else if (xop == 0x2) {
3002 // clr/mov shortcut
3004 rs1 = GET_FIELD(insn, 13, 17);
3005 if (rs1 == 0) {
3006 // or %g0, x, y -> mov T0, x; mov y, T0
3007 if (IS_IMM) { /* immediate */
3008 TCGv r_const;
3010 simm = GET_FIELDs(insn, 19, 31);
3011 r_const = tcg_const_tl(simm);
3012 gen_movl_TN_reg(rd, r_const);
3013 tcg_temp_free(r_const);
3014 } else { /* register */
3015 rs2 = GET_FIELD(insn, 27, 31);
3016 gen_movl_reg_TN(rs2, cpu_dst);
3017 gen_movl_TN_reg(rd, cpu_dst);
3019 } else {
3020 cpu_src1 = get_src1(insn, cpu_src1);
3021 if (IS_IMM) { /* immediate */
3022 simm = GET_FIELDs(insn, 19, 31);
3023 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3024 gen_movl_TN_reg(rd, cpu_dst);
3025 } else { /* register */
3026 // or x, %g0, y -> mov T1, x; mov y, T1
3027 rs2 = GET_FIELD(insn, 27, 31);
3028 if (rs2 != 0) {
3029 gen_movl_reg_TN(rs2, cpu_src2);
3030 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3031 gen_movl_TN_reg(rd, cpu_dst);
3032 } else
3033 gen_movl_TN_reg(rd, cpu_src1);
3036 #ifdef TARGET_SPARC64
3037 } else if (xop == 0x25) { /* sll, V9 sllx */
3038 cpu_src1 = get_src1(insn, cpu_src1);
3039 if (IS_IMM) { /* immediate */
3040 simm = GET_FIELDs(insn, 20, 31);
3041 if (insn & (1 << 12)) {
3042 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3043 } else {
3044 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3046 } else { /* register */
3047 rs2 = GET_FIELD(insn, 27, 31);
3048 gen_movl_reg_TN(rs2, cpu_src2);
3049 if (insn & (1 << 12)) {
3050 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3051 } else {
3052 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3054 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3056 gen_movl_TN_reg(rd, cpu_dst);
3057 } else if (xop == 0x26) { /* srl, V9 srlx */
3058 cpu_src1 = get_src1(insn, cpu_src1);
3059 if (IS_IMM) { /* immediate */
3060 simm = GET_FIELDs(insn, 20, 31);
3061 if (insn & (1 << 12)) {
3062 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3063 } else {
3064 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3065 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3067 } else { /* register */
3068 rs2 = GET_FIELD(insn, 27, 31);
3069 gen_movl_reg_TN(rs2, cpu_src2);
3070 if (insn & (1 << 12)) {
3071 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3072 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3073 } else {
3074 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3075 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3079 gen_movl_TN_reg(rd, cpu_dst);
3080 } else if (xop == 0x27) { /* sra, V9 srax */
3081 cpu_src1 = get_src1(insn, cpu_src1);
3082 if (IS_IMM) { /* immediate */
3083 simm = GET_FIELDs(insn, 20, 31);
3084 if (insn & (1 << 12)) {
3085 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3086 } else {
3087 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3088 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3089 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3091 } else { /* register */
3092 rs2 = GET_FIELD(insn, 27, 31);
3093 gen_movl_reg_TN(rs2, cpu_src2);
3094 if (insn & (1 << 12)) {
3095 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3096 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3097 } else {
3098 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3099 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3100 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3101 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3104 gen_movl_TN_reg(rd, cpu_dst);
3105 #endif
3106 } else if (xop < 0x36) {
3107 if (xop < 0x20) {
3108 cpu_src1 = get_src1(insn, cpu_src1);
3109 cpu_src2 = get_src2(insn, cpu_src2);
3110 switch (xop & ~0x10) {
3111 case 0x0: /* add */
3112 if (IS_IMM) {
3113 simm = GET_FIELDs(insn, 19, 31);
3114 if (xop & 0x10) {
3115 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3116 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3117 dc->cc_op = CC_OP_ADD;
3118 } else {
3119 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3121 } else {
3122 if (xop & 0x10) {
3123 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3124 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3125 dc->cc_op = CC_OP_ADD;
3126 } else {
3127 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3130 break;
3131 case 0x1: /* and */
3132 if (IS_IMM) {
3133 simm = GET_FIELDs(insn, 19, 31);
3134 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3135 } else {
3136 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3138 if (xop & 0x10) {
3139 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3140 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3141 dc->cc_op = CC_OP_LOGIC;
3143 break;
3144 case 0x2: /* or */
3145 if (IS_IMM) {
3146 simm = GET_FIELDs(insn, 19, 31);
3147 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3148 } else {
3149 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3151 if (xop & 0x10) {
3152 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3153 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3154 dc->cc_op = CC_OP_LOGIC;
3156 break;
3157 case 0x3: /* xor */
3158 if (IS_IMM) {
3159 simm = GET_FIELDs(insn, 19, 31);
3160 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3161 } else {
3162 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3164 if (xop & 0x10) {
3165 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3166 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3167 dc->cc_op = CC_OP_LOGIC;
3169 break;
3170 case 0x4: /* sub */
3171 if (IS_IMM) {
3172 simm = GET_FIELDs(insn, 19, 31);
3173 if (xop & 0x10) {
3174 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3175 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3176 dc->cc_op = CC_OP_FLAGS;
3177 } else {
3178 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3180 } else {
3181 if (xop & 0x10) {
3182 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3183 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3184 dc->cc_op = CC_OP_FLAGS;
3185 } else {
3186 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3189 break;
3190 case 0x5: /* andn */
3191 if (IS_IMM) {
3192 simm = GET_FIELDs(insn, 19, 31);
3193 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3194 } else {
3195 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3197 if (xop & 0x10) {
3198 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3199 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3200 dc->cc_op = CC_OP_LOGIC;
3202 break;
3203 case 0x6: /* orn */
3204 if (IS_IMM) {
3205 simm = GET_FIELDs(insn, 19, 31);
3206 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3207 } else {
3208 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3210 if (xop & 0x10) {
3211 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3212 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3213 dc->cc_op = CC_OP_LOGIC;
3215 break;
3216 case 0x7: /* xorn */
3217 if (IS_IMM) {
3218 simm = GET_FIELDs(insn, 19, 31);
3219 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3220 } else {
3221 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3222 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3224 if (xop & 0x10) {
3225 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3226 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3227 dc->cc_op = CC_OP_LOGIC;
3229 break;
3230 case 0x8: /* addx, V9 addc */
3231 if (IS_IMM) {
3232 simm = GET_FIELDs(insn, 19, 31);
3233 if (xop & 0x10) {
3234 gen_helper_compute_psr();
3235 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3236 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3237 dc->cc_op = CC_OP_ADDX;
3238 } else {
3239 gen_helper_compute_psr();
3240 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3241 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3242 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3244 } else {
3245 if (xop & 0x10) {
3246 gen_helper_compute_psr();
3247 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3248 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3249 dc->cc_op = CC_OP_ADDX;
3250 } else {
3251 gen_helper_compute_psr();
3252 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3253 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3254 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3257 break;
3258 #ifdef TARGET_SPARC64
3259 case 0x9: /* V9 mulx */
3260 if (IS_IMM) {
3261 simm = GET_FIELDs(insn, 19, 31);
3262 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3263 } else {
3264 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3266 break;
3267 #endif
3268 case 0xa: /* umul */
3269 CHECK_IU_FEATURE(dc, MUL);
3270 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3271 if (xop & 0x10) {
3272 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3273 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3274 dc->cc_op = CC_OP_LOGIC;
3276 break;
3277 case 0xb: /* smul */
3278 CHECK_IU_FEATURE(dc, MUL);
3279 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3280 if (xop & 0x10) {
3281 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3282 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3283 dc->cc_op = CC_OP_LOGIC;
3285 break;
3286 case 0xc: /* subx, V9 subc */
3287 if (IS_IMM) {
3288 simm = GET_FIELDs(insn, 19, 31);
3289 if (xop & 0x10) {
3290 gen_helper_compute_psr();
3291 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3292 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3293 dc->cc_op = CC_OP_FLAGS;
3294 } else {
3295 gen_helper_compute_psr();
3296 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3297 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3298 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3300 } else {
3301 if (xop & 0x10) {
3302 gen_helper_compute_psr();
3303 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3304 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3305 dc->cc_op = CC_OP_FLAGS;
3306 } else {
3307 gen_helper_compute_psr();
3308 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3309 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3310 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3313 break;
3314 #ifdef TARGET_SPARC64
3315 case 0xd: /* V9 udivx */
3316 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3317 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3318 gen_trap_ifdivzero_tl(cpu_cc_src2);
3319 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3320 break;
3321 #endif
3322 case 0xe: /* udiv */
3323 CHECK_IU_FEATURE(dc, DIV);
3324 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3325 if (xop & 0x10) {
3326 gen_op_div_cc(cpu_dst);
3327 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3328 dc->cc_op = CC_OP_FLAGS;
3330 break;
3331 case 0xf: /* sdiv */
3332 CHECK_IU_FEATURE(dc, DIV);
3333 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3334 if (xop & 0x10) {
3335 gen_op_div_cc(cpu_dst);
3336 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3337 dc->cc_op = CC_OP_FLAGS;
3339 break;
3340 default:
3341 goto illegal_insn;
3343 gen_movl_TN_reg(rd, cpu_dst);
3344 } else {
3345 cpu_src1 = get_src1(insn, cpu_src1);
3346 cpu_src2 = get_src2(insn, cpu_src2);
3347 switch (xop) {
3348 case 0x20: /* taddcc */
3349 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3350 gen_movl_TN_reg(rd, cpu_dst);
3351 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3352 dc->cc_op = CC_OP_FLAGS;
3353 break;
3354 case 0x21: /* tsubcc */
3355 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3356 gen_movl_TN_reg(rd, cpu_dst);
3357 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3358 dc->cc_op = CC_OP_FLAGS;
3359 break;
3360 case 0x22: /* taddcctv */
3361 save_state(dc, cpu_cond);
3362 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3363 gen_movl_TN_reg(rd, cpu_dst);
3364 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3365 dc->cc_op = CC_OP_FLAGS;
3366 break;
3367 case 0x23: /* tsubcctv */
3368 save_state(dc, cpu_cond);
3369 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3370 gen_movl_TN_reg(rd, cpu_dst);
3371 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3372 dc->cc_op = CC_OP_FLAGS;
3373 break;
3374 case 0x24: /* mulscc */
3375 gen_helper_compute_psr();
3376 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3377 gen_movl_TN_reg(rd, cpu_dst);
3378 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3379 dc->cc_op = CC_OP_FLAGS;
3380 break;
3381 #ifndef TARGET_SPARC64
3382 case 0x25: /* sll */
3383 if (IS_IMM) { /* immediate */
3384 simm = GET_FIELDs(insn, 20, 31);
3385 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3386 } else { /* register */
3387 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3388 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3390 gen_movl_TN_reg(rd, cpu_dst);
3391 break;
3392 case 0x26: /* srl */
3393 if (IS_IMM) { /* immediate */
3394 simm = GET_FIELDs(insn, 20, 31);
3395 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3396 } else { /* register */
3397 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3398 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3400 gen_movl_TN_reg(rd, cpu_dst);
3401 break;
3402 case 0x27: /* sra */
3403 if (IS_IMM) { /* immediate */
3404 simm = GET_FIELDs(insn, 20, 31);
3405 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3406 } else { /* register */
3407 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3408 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3410 gen_movl_TN_reg(rd, cpu_dst);
3411 break;
3412 #endif
3413 case 0x30:
3415 switch(rd) {
3416 case 0: /* wry */
3417 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3418 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3419 break;
3420 #ifndef TARGET_SPARC64
3421 case 0x01 ... 0x0f: /* undefined in the
3422 SPARCv8 manual, nop
3423 on the microSPARC
3424 II */
3425 case 0x10 ... 0x1f: /* implementation-dependent
3426 in the SPARCv8
3427 manual, nop on the
3428 microSPARC II */
3429 break;
3430 #else
3431 case 0x2: /* V9 wrccr */
3432 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3433 gen_helper_wrccr(cpu_dst);
3434 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3435 dc->cc_op = CC_OP_FLAGS;
3436 break;
3437 case 0x3: /* V9 wrasi */
3438 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3439 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3440 break;
3441 case 0x6: /* V9 wrfprs */
3442 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3443 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3444 save_state(dc, cpu_cond);
3445 gen_op_next_insn();
3446 tcg_gen_exit_tb(0);
3447 dc->is_br = 1;
3448 break;
3449 case 0xf: /* V9 sir, nop if user */
3450 #if !defined(CONFIG_USER_ONLY)
3451 if (supervisor(dc))
3452 ; // XXX
3453 #endif
3454 break;
3455 case 0x13: /* Graphics Status */
3456 if (gen_trap_ifnofpu(dc, cpu_cond))
3457 goto jmp_insn;
3458 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3459 break;
3460 case 0x14: /* Softint set */
3461 if (!supervisor(dc))
3462 goto illegal_insn;
3463 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3464 gen_helper_set_softint(cpu_tmp64);
3465 break;
3466 case 0x15: /* Softint clear */
3467 if (!supervisor(dc))
3468 goto illegal_insn;
3469 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3470 gen_helper_clear_softint(cpu_tmp64);
3471 break;
3472 case 0x16: /* Softint write */
3473 if (!supervisor(dc))
3474 goto illegal_insn;
3475 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3476 gen_helper_write_softint(cpu_tmp64);
3477 break;
3478 case 0x17: /* Tick compare */
3479 #if !defined(CONFIG_USER_ONLY)
3480 if (!supervisor(dc))
3481 goto illegal_insn;
3482 #endif
3484 TCGv_ptr r_tickptr;
3486 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3487 cpu_src2);
3488 r_tickptr = tcg_temp_new_ptr();
3489 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3490 offsetof(CPUState, tick));
3491 gen_helper_tick_set_limit(r_tickptr,
3492 cpu_tick_cmpr);
3493 tcg_temp_free_ptr(r_tickptr);
3495 break;
3496 case 0x18: /* System tick */
3497 #if !defined(CONFIG_USER_ONLY)
3498 if (!supervisor(dc))
3499 goto illegal_insn;
3500 #endif
3502 TCGv_ptr r_tickptr;
3504 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3505 cpu_src2);
3506 r_tickptr = tcg_temp_new_ptr();
3507 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3508 offsetof(CPUState, stick));
3509 gen_helper_tick_set_count(r_tickptr,
3510 cpu_dst);
3511 tcg_temp_free_ptr(r_tickptr);
3513 break;
3514 case 0x19: /* System tick compare */
3515 #if !defined(CONFIG_USER_ONLY)
3516 if (!supervisor(dc))
3517 goto illegal_insn;
3518 #endif
3520 TCGv_ptr r_tickptr;
3522 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3523 cpu_src2);
3524 r_tickptr = tcg_temp_new_ptr();
3525 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3526 offsetof(CPUState, stick));
3527 gen_helper_tick_set_limit(r_tickptr,
3528 cpu_stick_cmpr);
3529 tcg_temp_free_ptr(r_tickptr);
3531 break;
3533 case 0x10: /* Performance Control */
3534 case 0x11: /* Performance Instrumentation
3535 Counter */
3536 case 0x12: /* Dispatch Control */
3537 #endif
3538 default:
3539 goto illegal_insn;
3542 break;
3543 #if !defined(CONFIG_USER_ONLY)
3544 case 0x31: /* wrpsr, V9 saved, restored */
3546 if (!supervisor(dc))
3547 goto priv_insn;
3548 #ifdef TARGET_SPARC64
3549 switch (rd) {
3550 case 0:
3551 gen_helper_saved();
3552 break;
3553 case 1:
3554 gen_helper_restored();
3555 break;
3556 case 2: /* UA2005 allclean */
3557 case 3: /* UA2005 otherw */
3558 case 4: /* UA2005 normalw */
3559 case 5: /* UA2005 invalw */
3560 // XXX
3561 default:
3562 goto illegal_insn;
3564 #else
3565 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3566 gen_helper_wrpsr(cpu_dst);
3567 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3568 dc->cc_op = CC_OP_FLAGS;
3569 save_state(dc, cpu_cond);
3570 gen_op_next_insn();
3571 tcg_gen_exit_tb(0);
3572 dc->is_br = 1;
3573 #endif
3575 break;
3576 case 0x32: /* wrwim, V9 wrpr */
3578 if (!supervisor(dc))
3579 goto priv_insn;
3580 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3581 #ifdef TARGET_SPARC64
3582 switch (rd) {
3583 case 0: // tpc
3585 TCGv_ptr r_tsptr;
3587 r_tsptr = tcg_temp_new_ptr();
3588 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3589 offsetof(CPUState, tsptr));
3590 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3591 offsetof(trap_state, tpc));
3592 tcg_temp_free_ptr(r_tsptr);
3594 break;
3595 case 1: // tnpc
3597 TCGv_ptr r_tsptr;
3599 r_tsptr = tcg_temp_new_ptr();
3600 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3601 offsetof(CPUState, tsptr));
3602 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3603 offsetof(trap_state, tnpc));
3604 tcg_temp_free_ptr(r_tsptr);
3606 break;
3607 case 2: // tstate
3609 TCGv_ptr r_tsptr;
3611 r_tsptr = tcg_temp_new_ptr();
3612 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3613 offsetof(CPUState, tsptr));
3614 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3615 offsetof(trap_state,
3616 tstate));
3617 tcg_temp_free_ptr(r_tsptr);
3619 break;
3620 case 3: // tt
3622 TCGv_ptr r_tsptr;
3624 r_tsptr = tcg_temp_new_ptr();
3625 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3626 offsetof(CPUState, tsptr));
3627 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3628 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3629 offsetof(trap_state, tt));
3630 tcg_temp_free_ptr(r_tsptr);
3632 break;
3633 case 4: // tick
3635 TCGv_ptr r_tickptr;
3637 r_tickptr = tcg_temp_new_ptr();
3638 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3639 offsetof(CPUState, tick));
3640 gen_helper_tick_set_count(r_tickptr,
3641 cpu_tmp0);
3642 tcg_temp_free_ptr(r_tickptr);
3644 break;
3645 case 5: // tba
3646 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3647 break;
3648 case 6: // pstate
3649 save_state(dc, cpu_cond);
3650 gen_helper_wrpstate(cpu_tmp0);
3651 gen_op_next_insn();
3652 tcg_gen_exit_tb(0);
3653 dc->is_br = 1;
3654 break;
3655 case 7: // tl
3656 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3657 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3658 offsetof(CPUSPARCState, tl));
3659 break;
3660 case 8: // pil
3661 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3662 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3663 offsetof(CPUSPARCState,
3664 psrpil));
3665 break;
3666 case 9: // cwp
3667 gen_helper_wrcwp(cpu_tmp0);
3668 break;
3669 case 10: // cansave
3670 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3671 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3672 offsetof(CPUSPARCState,
3673 cansave));
3674 break;
3675 case 11: // canrestore
3676 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3677 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3678 offsetof(CPUSPARCState,
3679 canrestore));
3680 break;
3681 case 12: // cleanwin
3682 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3683 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3684 offsetof(CPUSPARCState,
3685 cleanwin));
3686 break;
3687 case 13: // otherwin
3688 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3689 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3690 offsetof(CPUSPARCState,
3691 otherwin));
3692 break;
3693 case 14: // wstate
3694 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3695 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3696 offsetof(CPUSPARCState,
3697 wstate));
3698 break;
3699 case 16: // UA2005 gl
3700 CHECK_IU_FEATURE(dc, GL);
3701 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3702 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3703 offsetof(CPUSPARCState, gl));
3704 break;
3705 case 26: // UA2005 strand status
3706 CHECK_IU_FEATURE(dc, HYPV);
3707 if (!hypervisor(dc))
3708 goto priv_insn;
3709 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3710 break;
3711 default:
3712 goto illegal_insn;
3714 #else
3715 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3716 if (dc->def->nwindows != 32)
3717 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3718 (1 << dc->def->nwindows) - 1);
3719 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3720 #endif
3722 break;
3723 case 0x33: /* wrtbr, UA2005 wrhpr */
3725 #ifndef TARGET_SPARC64
3726 if (!supervisor(dc))
3727 goto priv_insn;
3728 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3729 #else
3730 CHECK_IU_FEATURE(dc, HYPV);
3731 if (!hypervisor(dc))
3732 goto priv_insn;
3733 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3734 switch (rd) {
3735 case 0: // hpstate
3736 // XXX gen_op_wrhpstate();
3737 save_state(dc, cpu_cond);
3738 gen_op_next_insn();
3739 tcg_gen_exit_tb(0);
3740 dc->is_br = 1;
3741 break;
3742 case 1: // htstate
3743 // XXX gen_op_wrhtstate();
3744 break;
3745 case 3: // hintp
3746 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3747 break;
3748 case 5: // htba
3749 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3750 break;
3751 case 31: // hstick_cmpr
3753 TCGv_ptr r_tickptr;
3755 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3756 r_tickptr = tcg_temp_new_ptr();
3757 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3758 offsetof(CPUState, hstick));
3759 gen_helper_tick_set_limit(r_tickptr,
3760 cpu_hstick_cmpr);
3761 tcg_temp_free_ptr(r_tickptr);
3763 break;
3764 case 6: // hver readonly
3765 default:
3766 goto illegal_insn;
3768 #endif
3770 break;
3771 #endif
3772 #ifdef TARGET_SPARC64
3773 case 0x2c: /* V9 movcc */
3775 int cc = GET_FIELD_SP(insn, 11, 12);
3776 int cond = GET_FIELD_SP(insn, 14, 17);
3777 TCGv r_cond;
3778 int l1;
3780 r_cond = tcg_temp_new();
3781 if (insn & (1 << 18)) {
3782 if (cc == 0)
3783 gen_cond(r_cond, 0, cond, dc);
3784 else if (cc == 2)
3785 gen_cond(r_cond, 1, cond, dc);
3786 else
3787 goto illegal_insn;
3788 } else {
3789 gen_fcond(r_cond, cc, cond);
3792 l1 = gen_new_label();
3794 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3795 if (IS_IMM) { /* immediate */
3796 TCGv r_const;
3798 simm = GET_FIELD_SPs(insn, 0, 10);
3799 r_const = tcg_const_tl(simm);
3800 gen_movl_TN_reg(rd, r_const);
3801 tcg_temp_free(r_const);
3802 } else {
3803 rs2 = GET_FIELD_SP(insn, 0, 4);
3804 gen_movl_reg_TN(rs2, cpu_tmp0);
3805 gen_movl_TN_reg(rd, cpu_tmp0);
3807 gen_set_label(l1);
3808 tcg_temp_free(r_cond);
3809 break;
3811 case 0x2d: /* V9 sdivx */
3812 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3813 gen_movl_TN_reg(rd, cpu_dst);
3814 break;
3815 case 0x2e: /* V9 popc */
3817 cpu_src2 = get_src2(insn, cpu_src2);
3818 gen_helper_popc(cpu_dst, cpu_src2);
3819 gen_movl_TN_reg(rd, cpu_dst);
3821 case 0x2f: /* V9 movr */
3823 int cond = GET_FIELD_SP(insn, 10, 12);
3824 int l1;
3826 cpu_src1 = get_src1(insn, cpu_src1);
3828 l1 = gen_new_label();
3830 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3831 cpu_src1, 0, l1);
3832 if (IS_IMM) { /* immediate */
3833 TCGv r_const;
3835 simm = GET_FIELD_SPs(insn, 0, 9);
3836 r_const = tcg_const_tl(simm);
3837 gen_movl_TN_reg(rd, r_const);
3838 tcg_temp_free(r_const);
3839 } else {
3840 rs2 = GET_FIELD_SP(insn, 0, 4);
3841 gen_movl_reg_TN(rs2, cpu_tmp0);
3842 gen_movl_TN_reg(rd, cpu_tmp0);
3844 gen_set_label(l1);
3845 break;
3847 #endif
3848 default:
3849 goto illegal_insn;
3852 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3853 #ifdef TARGET_SPARC64
3854 int opf = GET_FIELD_SP(insn, 5, 13);
3855 rs1 = GET_FIELD(insn, 13, 17);
3856 rs2 = GET_FIELD(insn, 27, 31);
3857 if (gen_trap_ifnofpu(dc, cpu_cond))
3858 goto jmp_insn;
3860 switch (opf) {
3861 case 0x000: /* VIS I edge8cc */
3862 case 0x001: /* VIS II edge8n */
3863 case 0x002: /* VIS I edge8lcc */
3864 case 0x003: /* VIS II edge8ln */
3865 case 0x004: /* VIS I edge16cc */
3866 case 0x005: /* VIS II edge16n */
3867 case 0x006: /* VIS I edge16lcc */
3868 case 0x007: /* VIS II edge16ln */
3869 case 0x008: /* VIS I edge32cc */
3870 case 0x009: /* VIS II edge32n */
3871 case 0x00a: /* VIS I edge32lcc */
3872 case 0x00b: /* VIS II edge32ln */
3873 // XXX
3874 goto illegal_insn;
3875 case 0x010: /* VIS I array8 */
3876 CHECK_FPU_FEATURE(dc, VIS1);
3877 cpu_src1 = get_src1(insn, cpu_src1);
3878 gen_movl_reg_TN(rs2, cpu_src2);
3879 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3880 gen_movl_TN_reg(rd, cpu_dst);
3881 break;
3882 case 0x012: /* VIS I array16 */
3883 CHECK_FPU_FEATURE(dc, VIS1);
3884 cpu_src1 = get_src1(insn, cpu_src1);
3885 gen_movl_reg_TN(rs2, cpu_src2);
3886 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3887 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3888 gen_movl_TN_reg(rd, cpu_dst);
3889 break;
3890 case 0x014: /* VIS I array32 */
3891 CHECK_FPU_FEATURE(dc, VIS1);
3892 cpu_src1 = get_src1(insn, cpu_src1);
3893 gen_movl_reg_TN(rs2, cpu_src2);
3894 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3895 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3896 gen_movl_TN_reg(rd, cpu_dst);
3897 break;
3898 case 0x018: /* VIS I alignaddr */
3899 CHECK_FPU_FEATURE(dc, VIS1);
3900 cpu_src1 = get_src1(insn, cpu_src1);
3901 gen_movl_reg_TN(rs2, cpu_src2);
3902 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3903 gen_movl_TN_reg(rd, cpu_dst);
3904 break;
3905 case 0x019: /* VIS II bmask */
3906 case 0x01a: /* VIS I alignaddrl */
3907 // XXX
3908 goto illegal_insn;
3909 case 0x020: /* VIS I fcmple16 */
3910 CHECK_FPU_FEATURE(dc, VIS1);
3911 gen_op_load_fpr_DT0(DFPREG(rs1));
3912 gen_op_load_fpr_DT1(DFPREG(rs2));
3913 gen_helper_fcmple16();
3914 gen_op_store_DT0_fpr(DFPREG(rd));
3915 break;
3916 case 0x022: /* VIS I fcmpne16 */
3917 CHECK_FPU_FEATURE(dc, VIS1);
3918 gen_op_load_fpr_DT0(DFPREG(rs1));
3919 gen_op_load_fpr_DT1(DFPREG(rs2));
3920 gen_helper_fcmpne16();
3921 gen_op_store_DT0_fpr(DFPREG(rd));
3922 break;
3923 case 0x024: /* VIS I fcmple32 */
3924 CHECK_FPU_FEATURE(dc, VIS1);
3925 gen_op_load_fpr_DT0(DFPREG(rs1));
3926 gen_op_load_fpr_DT1(DFPREG(rs2));
3927 gen_helper_fcmple32();
3928 gen_op_store_DT0_fpr(DFPREG(rd));
3929 break;
3930 case 0x026: /* VIS I fcmpne32 */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 gen_op_load_fpr_DT0(DFPREG(rs1));
3933 gen_op_load_fpr_DT1(DFPREG(rs2));
3934 gen_helper_fcmpne32();
3935 gen_op_store_DT0_fpr(DFPREG(rd));
3936 break;
3937 case 0x028: /* VIS I fcmpgt16 */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 gen_op_load_fpr_DT0(DFPREG(rs1));
3940 gen_op_load_fpr_DT1(DFPREG(rs2));
3941 gen_helper_fcmpgt16();
3942 gen_op_store_DT0_fpr(DFPREG(rd));
3943 break;
3944 case 0x02a: /* VIS I fcmpeq16 */
3945 CHECK_FPU_FEATURE(dc, VIS1);
3946 gen_op_load_fpr_DT0(DFPREG(rs1));
3947 gen_op_load_fpr_DT1(DFPREG(rs2));
3948 gen_helper_fcmpeq16();
3949 gen_op_store_DT0_fpr(DFPREG(rd));
3950 break;
3951 case 0x02c: /* VIS I fcmpgt32 */
3952 CHECK_FPU_FEATURE(dc, VIS1);
3953 gen_op_load_fpr_DT0(DFPREG(rs1));
3954 gen_op_load_fpr_DT1(DFPREG(rs2));
3955 gen_helper_fcmpgt32();
3956 gen_op_store_DT0_fpr(DFPREG(rd));
3957 break;
3958 case 0x02e: /* VIS I fcmpeq32 */
3959 CHECK_FPU_FEATURE(dc, VIS1);
3960 gen_op_load_fpr_DT0(DFPREG(rs1));
3961 gen_op_load_fpr_DT1(DFPREG(rs2));
3962 gen_helper_fcmpeq32();
3963 gen_op_store_DT0_fpr(DFPREG(rd));
3964 break;
3965 case 0x031: /* VIS I fmul8x16 */
3966 CHECK_FPU_FEATURE(dc, VIS1);
3967 gen_op_load_fpr_DT0(DFPREG(rs1));
3968 gen_op_load_fpr_DT1(DFPREG(rs2));
3969 gen_helper_fmul8x16();
3970 gen_op_store_DT0_fpr(DFPREG(rd));
3971 break;
3972 case 0x033: /* VIS I fmul8x16au */
3973 CHECK_FPU_FEATURE(dc, VIS1);
3974 gen_op_load_fpr_DT0(DFPREG(rs1));
3975 gen_op_load_fpr_DT1(DFPREG(rs2));
3976 gen_helper_fmul8x16au();
3977 gen_op_store_DT0_fpr(DFPREG(rd));
3978 break;
3979 case 0x035: /* VIS I fmul8x16al */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 gen_op_load_fpr_DT0(DFPREG(rs1));
3982 gen_op_load_fpr_DT1(DFPREG(rs2));
3983 gen_helper_fmul8x16al();
3984 gen_op_store_DT0_fpr(DFPREG(rd));
3985 break;
3986 case 0x036: /* VIS I fmul8sux16 */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 gen_op_load_fpr_DT0(DFPREG(rs1));
3989 gen_op_load_fpr_DT1(DFPREG(rs2));
3990 gen_helper_fmul8sux16();
3991 gen_op_store_DT0_fpr(DFPREG(rd));
3992 break;
3993 case 0x037: /* VIS I fmul8ulx16 */
3994 CHECK_FPU_FEATURE(dc, VIS1);
3995 gen_op_load_fpr_DT0(DFPREG(rs1));
3996 gen_op_load_fpr_DT1(DFPREG(rs2));
3997 gen_helper_fmul8ulx16();
3998 gen_op_store_DT0_fpr(DFPREG(rd));
3999 break;
4000 case 0x038: /* VIS I fmuld8sux16 */
4001 CHECK_FPU_FEATURE(dc, VIS1);
4002 gen_op_load_fpr_DT0(DFPREG(rs1));
4003 gen_op_load_fpr_DT1(DFPREG(rs2));
4004 gen_helper_fmuld8sux16();
4005 gen_op_store_DT0_fpr(DFPREG(rd));
4006 break;
4007 case 0x039: /* VIS I fmuld8ulx16 */
4008 CHECK_FPU_FEATURE(dc, VIS1);
4009 gen_op_load_fpr_DT0(DFPREG(rs1));
4010 gen_op_load_fpr_DT1(DFPREG(rs2));
4011 gen_helper_fmuld8ulx16();
4012 gen_op_store_DT0_fpr(DFPREG(rd));
4013 break;
4014 case 0x03a: /* VIS I fpack32 */
4015 case 0x03b: /* VIS I fpack16 */
4016 case 0x03d: /* VIS I fpackfix */
4017 case 0x03e: /* VIS I pdist */
4018 // XXX
4019 goto illegal_insn;
4020 case 0x048: /* VIS I faligndata */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 gen_op_load_fpr_DT0(DFPREG(rs1));
4023 gen_op_load_fpr_DT1(DFPREG(rs2));
4024 gen_helper_faligndata();
4025 gen_op_store_DT0_fpr(DFPREG(rd));
4026 break;
4027 case 0x04b: /* VIS I fpmerge */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 gen_op_load_fpr_DT0(DFPREG(rs1));
4030 gen_op_load_fpr_DT1(DFPREG(rs2));
4031 gen_helper_fpmerge();
4032 gen_op_store_DT0_fpr(DFPREG(rd));
4033 break;
4034 case 0x04c: /* VIS II bshuffle */
4035 // XXX
4036 goto illegal_insn;
4037 case 0x04d: /* VIS I fexpand */
4038 CHECK_FPU_FEATURE(dc, VIS1);
4039 gen_op_load_fpr_DT0(DFPREG(rs1));
4040 gen_op_load_fpr_DT1(DFPREG(rs2));
4041 gen_helper_fexpand();
4042 gen_op_store_DT0_fpr(DFPREG(rd));
4043 break;
4044 case 0x050: /* VIS I fpadd16 */
4045 CHECK_FPU_FEATURE(dc, VIS1);
4046 gen_op_load_fpr_DT0(DFPREG(rs1));
4047 gen_op_load_fpr_DT1(DFPREG(rs2));
4048 gen_helper_fpadd16();
4049 gen_op_store_DT0_fpr(DFPREG(rd));
4050 break;
4051 case 0x051: /* VIS I fpadd16s */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 gen_helper_fpadd16s(cpu_fpr[rd],
4054 cpu_fpr[rs1], cpu_fpr[rs2]);
4055 break;
4056 case 0x052: /* VIS I fpadd32 */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 gen_op_load_fpr_DT0(DFPREG(rs1));
4059 gen_op_load_fpr_DT1(DFPREG(rs2));
4060 gen_helper_fpadd32();
4061 gen_op_store_DT0_fpr(DFPREG(rd));
4062 break;
4063 case 0x053: /* VIS I fpadd32s */
4064 CHECK_FPU_FEATURE(dc, VIS1);
4065 gen_helper_fpadd32s(cpu_fpr[rd],
4066 cpu_fpr[rs1], cpu_fpr[rs2]);
4067 break;
4068 case 0x054: /* VIS I fpsub16 */
4069 CHECK_FPU_FEATURE(dc, VIS1);
4070 gen_op_load_fpr_DT0(DFPREG(rs1));
4071 gen_op_load_fpr_DT1(DFPREG(rs2));
4072 gen_helper_fpsub16();
4073 gen_op_store_DT0_fpr(DFPREG(rd));
4074 break;
4075 case 0x055: /* VIS I fpsub16s */
4076 CHECK_FPU_FEATURE(dc, VIS1);
4077 gen_helper_fpsub16s(cpu_fpr[rd],
4078 cpu_fpr[rs1], cpu_fpr[rs2]);
4079 break;
4080 case 0x056: /* VIS I fpsub32 */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 gen_op_load_fpr_DT0(DFPREG(rs1));
4083 gen_op_load_fpr_DT1(DFPREG(rs2));
4084 gen_helper_fpsub32();
4085 gen_op_store_DT0_fpr(DFPREG(rd));
4086 break;
4087 case 0x057: /* VIS I fpsub32s */
4088 CHECK_FPU_FEATURE(dc, VIS1);
4089 gen_helper_fpsub32s(cpu_fpr[rd],
4090 cpu_fpr[rs1], cpu_fpr[rs2]);
4091 break;
4092 case 0x060: /* VIS I fzero */
4093 CHECK_FPU_FEATURE(dc, VIS1);
4094 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4095 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4096 break;
4097 case 0x061: /* VIS I fzeros */
4098 CHECK_FPU_FEATURE(dc, VIS1);
4099 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4100 break;
4101 case 0x062: /* VIS I fnor */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4104 cpu_fpr[DFPREG(rs2)]);
4105 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4106 cpu_fpr[DFPREG(rs2) + 1]);
4107 break;
4108 case 0x063: /* VIS I fnors */
4109 CHECK_FPU_FEATURE(dc, VIS1);
4110 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4111 break;
4112 case 0x064: /* VIS I fandnot2 */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4115 cpu_fpr[DFPREG(rs2)]);
4116 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4117 cpu_fpr[DFPREG(rs1) + 1],
4118 cpu_fpr[DFPREG(rs2) + 1]);
4119 break;
4120 case 0x065: /* VIS I fandnot2s */
4121 CHECK_FPU_FEATURE(dc, VIS1);
4122 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4123 break;
4124 case 0x066: /* VIS I fnot2 */
4125 CHECK_FPU_FEATURE(dc, VIS1);
4126 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4127 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4128 cpu_fpr[DFPREG(rs2) + 1]);
4129 break;
4130 case 0x067: /* VIS I fnot2s */
4131 CHECK_FPU_FEATURE(dc, VIS1);
4132 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4133 break;
4134 case 0x068: /* VIS I fandnot1 */
4135 CHECK_FPU_FEATURE(dc, VIS1);
4136 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4137 cpu_fpr[DFPREG(rs1)]);
4138 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4139 cpu_fpr[DFPREG(rs2) + 1],
4140 cpu_fpr[DFPREG(rs1) + 1]);
4141 break;
4142 case 0x069: /* VIS I fandnot1s */
4143 CHECK_FPU_FEATURE(dc, VIS1);
4144 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4145 break;
4146 case 0x06a: /* VIS I fnot1 */
4147 CHECK_FPU_FEATURE(dc, VIS1);
4148 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4149 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4150 cpu_fpr[DFPREG(rs1) + 1]);
4151 break;
4152 case 0x06b: /* VIS I fnot1s */
4153 CHECK_FPU_FEATURE(dc, VIS1);
4154 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4155 break;
4156 case 0x06c: /* VIS I fxor */
4157 CHECK_FPU_FEATURE(dc, VIS1);
4158 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4159 cpu_fpr[DFPREG(rs2)]);
4160 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4161 cpu_fpr[DFPREG(rs1) + 1],
4162 cpu_fpr[DFPREG(rs2) + 1]);
4163 break;
4164 case 0x06d: /* VIS I fxors */
4165 CHECK_FPU_FEATURE(dc, VIS1);
4166 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4167 break;
4168 case 0x06e: /* VIS I fnand */
4169 CHECK_FPU_FEATURE(dc, VIS1);
4170 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4171 cpu_fpr[DFPREG(rs2)]);
4172 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4173 cpu_fpr[DFPREG(rs2) + 1]);
4174 break;
4175 case 0x06f: /* VIS I fnands */
4176 CHECK_FPU_FEATURE(dc, VIS1);
4177 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4178 break;
4179 case 0x070: /* VIS I fand */
4180 CHECK_FPU_FEATURE(dc, VIS1);
4181 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4182 cpu_fpr[DFPREG(rs2)]);
4183 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4184 cpu_fpr[DFPREG(rs1) + 1],
4185 cpu_fpr[DFPREG(rs2) + 1]);
4186 break;
4187 case 0x071: /* VIS I fands */
4188 CHECK_FPU_FEATURE(dc, VIS1);
4189 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4190 break;
4191 case 0x072: /* VIS I fxnor */
4192 CHECK_FPU_FEATURE(dc, VIS1);
4193 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4194 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4195 cpu_fpr[DFPREG(rs1)]);
4196 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4197 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4198 cpu_fpr[DFPREG(rs1) + 1]);
4199 break;
4200 case 0x073: /* VIS I fxnors */
4201 CHECK_FPU_FEATURE(dc, VIS1);
4202 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4203 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4204 break;
4205 case 0x074: /* VIS I fsrc1 */
4206 CHECK_FPU_FEATURE(dc, VIS1);
4207 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4208 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4209 cpu_fpr[DFPREG(rs1) + 1]);
4210 break;
4211 case 0x075: /* VIS I fsrc1s */
4212 CHECK_FPU_FEATURE(dc, VIS1);
4213 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4214 break;
4215 case 0x076: /* VIS I fornot2 */
4216 CHECK_FPU_FEATURE(dc, VIS1);
4217 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4218 cpu_fpr[DFPREG(rs2)]);
4219 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4220 cpu_fpr[DFPREG(rs1) + 1],
4221 cpu_fpr[DFPREG(rs2) + 1]);
4222 break;
4223 case 0x077: /* VIS I fornot2s */
4224 CHECK_FPU_FEATURE(dc, VIS1);
4225 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4226 break;
4227 case 0x078: /* VIS I fsrc2 */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 gen_op_load_fpr_DT0(DFPREG(rs2));
4230 gen_op_store_DT0_fpr(DFPREG(rd));
4231 break;
4232 case 0x079: /* VIS I fsrc2s */
4233 CHECK_FPU_FEATURE(dc, VIS1);
4234 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4235 break;
4236 case 0x07a: /* VIS I fornot1 */
4237 CHECK_FPU_FEATURE(dc, VIS1);
4238 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4239 cpu_fpr[DFPREG(rs1)]);
4240 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4241 cpu_fpr[DFPREG(rs2) + 1],
4242 cpu_fpr[DFPREG(rs1) + 1]);
4243 break;
4244 case 0x07b: /* VIS I fornot1s */
4245 CHECK_FPU_FEATURE(dc, VIS1);
4246 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4247 break;
4248 case 0x07c: /* VIS I for */
4249 CHECK_FPU_FEATURE(dc, VIS1);
4250 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4251 cpu_fpr[DFPREG(rs2)]);
4252 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4253 cpu_fpr[DFPREG(rs1) + 1],
4254 cpu_fpr[DFPREG(rs2) + 1]);
4255 break;
4256 case 0x07d: /* VIS I fors */
4257 CHECK_FPU_FEATURE(dc, VIS1);
4258 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4259 break;
4260 case 0x07e: /* VIS I fone */
4261 CHECK_FPU_FEATURE(dc, VIS1);
4262 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4263 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4264 break;
4265 case 0x07f: /* VIS I fones */
4266 CHECK_FPU_FEATURE(dc, VIS1);
4267 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4268 break;
4269 case 0x080: /* VIS I shutdown */
4270 case 0x081: /* VIS II siam */
4271 // XXX
4272 goto illegal_insn;
4273 default:
4274 goto illegal_insn;
4276 #else
4277 goto ncp_insn;
4278 #endif
4279 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4280 #ifdef TARGET_SPARC64
4281 goto illegal_insn;
4282 #else
4283 goto ncp_insn;
4284 #endif
4285 #ifdef TARGET_SPARC64
4286 } else if (xop == 0x39) { /* V9 return */
4287 TCGv_i32 r_const;
4289 save_state(dc, cpu_cond);
4290 cpu_src1 = get_src1(insn, cpu_src1);
4291 if (IS_IMM) { /* immediate */
4292 simm = GET_FIELDs(insn, 19, 31);
4293 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4294 } else { /* register */
4295 rs2 = GET_FIELD(insn, 27, 31);
4296 if (rs2) {
4297 gen_movl_reg_TN(rs2, cpu_src2);
4298 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4299 } else
4300 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4302 gen_helper_restore();
4303 gen_mov_pc_npc(dc, cpu_cond);
4304 r_const = tcg_const_i32(3);
4305 gen_helper_check_align(cpu_dst, r_const);
4306 tcg_temp_free_i32(r_const);
4307 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4308 dc->npc = DYNAMIC_PC;
4309 goto jmp_insn;
4310 #endif
4311 } else {
4312 cpu_src1 = get_src1(insn, cpu_src1);
4313 if (IS_IMM) { /* immediate */
4314 simm = GET_FIELDs(insn, 19, 31);
4315 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4316 } else { /* register */
4317 rs2 = GET_FIELD(insn, 27, 31);
4318 if (rs2) {
4319 gen_movl_reg_TN(rs2, cpu_src2);
4320 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4321 } else
4322 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4324 switch (xop) {
4325 case 0x38: /* jmpl */
4327 TCGv r_pc;
4328 TCGv_i32 r_const;
4330 r_pc = tcg_const_tl(dc->pc);
4331 gen_movl_TN_reg(rd, r_pc);
4332 tcg_temp_free(r_pc);
4333 gen_mov_pc_npc(dc, cpu_cond);
4334 r_const = tcg_const_i32(3);
4335 gen_helper_check_align(cpu_dst, r_const);
4336 tcg_temp_free_i32(r_const);
4337 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4338 dc->npc = DYNAMIC_PC;
4340 goto jmp_insn;
4341 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4342 case 0x39: /* rett, V9 return */
4344 TCGv_i32 r_const;
4346 if (!supervisor(dc))
4347 goto priv_insn;
4348 gen_mov_pc_npc(dc, cpu_cond);
4349 r_const = tcg_const_i32(3);
4350 gen_helper_check_align(cpu_dst, r_const);
4351 tcg_temp_free_i32(r_const);
4352 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4353 dc->npc = DYNAMIC_PC;
4354 gen_helper_rett();
4356 goto jmp_insn;
4357 #endif
4358 case 0x3b: /* flush */
4359 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4360 goto unimp_flush;
4361 gen_helper_flush(cpu_dst);
4362 break;
4363 case 0x3c: /* save */
4364 save_state(dc, cpu_cond);
4365 gen_helper_save();
4366 gen_movl_TN_reg(rd, cpu_dst);
4367 break;
4368 case 0x3d: /* restore */
4369 save_state(dc, cpu_cond);
4370 gen_helper_restore();
4371 gen_movl_TN_reg(rd, cpu_dst);
4372 break;
4373 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4374 case 0x3e: /* V9 done/retry */
4376 switch (rd) {
4377 case 0:
4378 if (!supervisor(dc))
4379 goto priv_insn;
4380 dc->npc = DYNAMIC_PC;
4381 dc->pc = DYNAMIC_PC;
4382 gen_helper_done();
4383 goto jmp_insn;
4384 case 1:
4385 if (!supervisor(dc))
4386 goto priv_insn;
4387 dc->npc = DYNAMIC_PC;
4388 dc->pc = DYNAMIC_PC;
4389 gen_helper_retry();
4390 goto jmp_insn;
4391 default:
4392 goto illegal_insn;
4395 break;
4396 #endif
4397 default:
4398 goto illegal_insn;
4401 break;
4403 break;
4404 case 3: /* load/store instructions */
4406 unsigned int xop = GET_FIELD(insn, 7, 12);
4408 cpu_src1 = get_src1(insn, cpu_src1);
4409 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4410 rs2 = GET_FIELD(insn, 27, 31);
4411 gen_movl_reg_TN(rs2, cpu_src2);
4412 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4413 } else if (IS_IMM) { /* immediate */
4414 simm = GET_FIELDs(insn, 19, 31);
4415 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4416 } else { /* register */
4417 rs2 = GET_FIELD(insn, 27, 31);
4418 if (rs2 != 0) {
4419 gen_movl_reg_TN(rs2, cpu_src2);
4420 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4421 } else
4422 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4424 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4425 (xop > 0x17 && xop <= 0x1d ) ||
4426 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4427 switch (xop) {
4428 case 0x0: /* ld, V9 lduw, load unsigned word */
4429 gen_address_mask(dc, cpu_addr);
4430 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4431 break;
4432 case 0x1: /* ldub, load unsigned byte */
4433 gen_address_mask(dc, cpu_addr);
4434 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4435 break;
4436 case 0x2: /* lduh, load unsigned halfword */
4437 gen_address_mask(dc, cpu_addr);
4438 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4439 break;
4440 case 0x3: /* ldd, load double word */
4441 if (rd & 1)
4442 goto illegal_insn;
4443 else {
4444 TCGv_i32 r_const;
4446 save_state(dc, cpu_cond);
4447 r_const = tcg_const_i32(7);
4448 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4449 tcg_temp_free_i32(r_const);
4450 gen_address_mask(dc, cpu_addr);
4451 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4452 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4453 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4454 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4455 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4456 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4457 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4459 break;
4460 case 0x9: /* ldsb, load signed byte */
4461 gen_address_mask(dc, cpu_addr);
4462 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4463 break;
4464 case 0xa: /* ldsh, load signed halfword */
4465 gen_address_mask(dc, cpu_addr);
4466 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4467 break;
4468 case 0xd: /* ldstub -- XXX: should be atomically */
4470 TCGv r_const;
4472 gen_address_mask(dc, cpu_addr);
4473 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4474 r_const = tcg_const_tl(0xff);
4475 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4476 tcg_temp_free(r_const);
4478 break;
4479 case 0x0f: /* swap, swap register with memory. Also
4480 atomically */
4481 CHECK_IU_FEATURE(dc, SWAP);
4482 gen_movl_reg_TN(rd, cpu_val);
4483 gen_address_mask(dc, cpu_addr);
4484 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4485 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4486 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4487 break;
4488 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4489 case 0x10: /* lda, V9 lduwa, load word alternate */
4490 #ifndef TARGET_SPARC64
4491 if (IS_IMM)
4492 goto illegal_insn;
4493 if (!supervisor(dc))
4494 goto priv_insn;
4495 #endif
4496 save_state(dc, cpu_cond);
4497 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4498 break;
4499 case 0x11: /* lduba, load unsigned byte alternate */
4500 #ifndef TARGET_SPARC64
4501 if (IS_IMM)
4502 goto illegal_insn;
4503 if (!supervisor(dc))
4504 goto priv_insn;
4505 #endif
4506 save_state(dc, cpu_cond);
4507 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4508 break;
4509 case 0x12: /* lduha, load unsigned halfword alternate */
4510 #ifndef TARGET_SPARC64
4511 if (IS_IMM)
4512 goto illegal_insn;
4513 if (!supervisor(dc))
4514 goto priv_insn;
4515 #endif
4516 save_state(dc, cpu_cond);
4517 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4518 break;
4519 case 0x13: /* ldda, load double word alternate */
4520 #ifndef TARGET_SPARC64
4521 if (IS_IMM)
4522 goto illegal_insn;
4523 if (!supervisor(dc))
4524 goto priv_insn;
4525 #endif
4526 if (rd & 1)
4527 goto illegal_insn;
4528 save_state(dc, cpu_cond);
4529 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4530 goto skip_move;
4531 case 0x19: /* ldsba, load signed byte alternate */
4532 #ifndef TARGET_SPARC64
4533 if (IS_IMM)
4534 goto illegal_insn;
4535 if (!supervisor(dc))
4536 goto priv_insn;
4537 #endif
4538 save_state(dc, cpu_cond);
4539 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4540 break;
4541 case 0x1a: /* ldsha, load signed halfword alternate */
4542 #ifndef TARGET_SPARC64
4543 if (IS_IMM)
4544 goto illegal_insn;
4545 if (!supervisor(dc))
4546 goto priv_insn;
4547 #endif
4548 save_state(dc, cpu_cond);
4549 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4550 break;
4551 case 0x1d: /* ldstuba -- XXX: should be atomically */
4552 #ifndef TARGET_SPARC64
4553 if (IS_IMM)
4554 goto illegal_insn;
4555 if (!supervisor(dc))
4556 goto priv_insn;
4557 #endif
4558 save_state(dc, cpu_cond);
4559 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4560 break;
4561 case 0x1f: /* swapa, swap reg with alt. memory. Also
4562 atomically */
4563 CHECK_IU_FEATURE(dc, SWAP);
4564 #ifndef TARGET_SPARC64
4565 if (IS_IMM)
4566 goto illegal_insn;
4567 if (!supervisor(dc))
4568 goto priv_insn;
4569 #endif
4570 save_state(dc, cpu_cond);
4571 gen_movl_reg_TN(rd, cpu_val);
4572 gen_swap_asi(cpu_val, cpu_addr, insn);
4573 break;
4575 #ifndef TARGET_SPARC64
4576 case 0x30: /* ldc */
4577 case 0x31: /* ldcsr */
4578 case 0x33: /* lddc */
4579 goto ncp_insn;
4580 #endif
4581 #endif
4582 #ifdef TARGET_SPARC64
4583 case 0x08: /* V9 ldsw */
4584 gen_address_mask(dc, cpu_addr);
4585 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4586 break;
4587 case 0x0b: /* V9 ldx */
4588 gen_address_mask(dc, cpu_addr);
4589 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4590 break;
4591 case 0x18: /* V9 ldswa */
4592 save_state(dc, cpu_cond);
4593 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4594 break;
4595 case 0x1b: /* V9 ldxa */
4596 save_state(dc, cpu_cond);
4597 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4598 break;
4599 case 0x2d: /* V9 prefetch, no effect */
4600 goto skip_move;
4601 case 0x30: /* V9 ldfa */
4602 save_state(dc, cpu_cond);
4603 gen_ldf_asi(cpu_addr, insn, 4, rd);
4604 goto skip_move;
4605 case 0x33: /* V9 lddfa */
4606 save_state(dc, cpu_cond);
4607 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4608 goto skip_move;
4609 case 0x3d: /* V9 prefetcha, no effect */
4610 goto skip_move;
4611 case 0x32: /* V9 ldqfa */
4612 CHECK_FPU_FEATURE(dc, FLOAT128);
4613 save_state(dc, cpu_cond);
4614 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4615 goto skip_move;
4616 #endif
4617 default:
4618 goto illegal_insn;
4620 gen_movl_TN_reg(rd, cpu_val);
4621 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4622 skip_move: ;
4623 #endif
4624 } else if (xop >= 0x20 && xop < 0x24) {
4625 if (gen_trap_ifnofpu(dc, cpu_cond))
4626 goto jmp_insn;
4627 save_state(dc, cpu_cond);
4628 switch (xop) {
4629 case 0x20: /* ldf, load fpreg */
4630 gen_address_mask(dc, cpu_addr);
4631 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4632 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4633 break;
4634 case 0x21: /* ldfsr, V9 ldxfsr */
4635 #ifdef TARGET_SPARC64
4636 gen_address_mask(dc, cpu_addr);
4637 if (rd == 1) {
4638 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4639 gen_helper_ldxfsr(cpu_tmp64);
4640 } else
4641 #else
4643 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4644 gen_helper_ldfsr(cpu_tmp32);
4646 #endif
4647 break;
4648 case 0x22: /* ldqf, load quad fpreg */
4650 TCGv_i32 r_const;
4652 CHECK_FPU_FEATURE(dc, FLOAT128);
4653 r_const = tcg_const_i32(dc->mem_idx);
4654 gen_helper_ldqf(cpu_addr, r_const);
4655 tcg_temp_free_i32(r_const);
4656 gen_op_store_QT0_fpr(QFPREG(rd));
4658 break;
4659 case 0x23: /* lddf, load double fpreg */
4661 TCGv_i32 r_const;
4663 r_const = tcg_const_i32(dc->mem_idx);
4664 gen_helper_lddf(cpu_addr, r_const);
4665 tcg_temp_free_i32(r_const);
4666 gen_op_store_DT0_fpr(DFPREG(rd));
4668 break;
4669 default:
4670 goto illegal_insn;
4672 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4673 xop == 0xe || xop == 0x1e) {
4674 gen_movl_reg_TN(rd, cpu_val);
4675 switch (xop) {
4676 case 0x4: /* st, store word */
4677 gen_address_mask(dc, cpu_addr);
4678 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4679 break;
4680 case 0x5: /* stb, store byte */
4681 gen_address_mask(dc, cpu_addr);
4682 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4683 break;
4684 case 0x6: /* sth, store halfword */
4685 gen_address_mask(dc, cpu_addr);
4686 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4687 break;
4688 case 0x7: /* std, store double word */
4689 if (rd & 1)
4690 goto illegal_insn;
4691 else {
4692 TCGv_i32 r_const;
4694 save_state(dc, cpu_cond);
4695 gen_address_mask(dc, cpu_addr);
4696 r_const = tcg_const_i32(7);
4697 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4698 tcg_temp_free_i32(r_const);
4699 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4700 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4701 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4703 break;
4704 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4705 case 0x14: /* sta, V9 stwa, store word alternate */
4706 #ifndef TARGET_SPARC64
4707 if (IS_IMM)
4708 goto illegal_insn;
4709 if (!supervisor(dc))
4710 goto priv_insn;
4711 #endif
4712 save_state(dc, cpu_cond);
4713 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4714 break;
4715 case 0x15: /* stba, store byte alternate */
4716 #ifndef TARGET_SPARC64
4717 if (IS_IMM)
4718 goto illegal_insn;
4719 if (!supervisor(dc))
4720 goto priv_insn;
4721 #endif
4722 save_state(dc, cpu_cond);
4723 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4724 break;
4725 case 0x16: /* stha, store halfword alternate */
4726 #ifndef TARGET_SPARC64
4727 if (IS_IMM)
4728 goto illegal_insn;
4729 if (!supervisor(dc))
4730 goto priv_insn;
4731 #endif
4732 save_state(dc, cpu_cond);
4733 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4734 break;
4735 case 0x17: /* stda, store double word alternate */
4736 #ifndef TARGET_SPARC64
4737 if (IS_IMM)
4738 goto illegal_insn;
4739 if (!supervisor(dc))
4740 goto priv_insn;
4741 #endif
4742 if (rd & 1)
4743 goto illegal_insn;
4744 else {
4745 save_state(dc, cpu_cond);
4746 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4748 break;
4749 #endif
4750 #ifdef TARGET_SPARC64
4751 case 0x0e: /* V9 stx */
4752 gen_address_mask(dc, cpu_addr);
4753 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4754 break;
4755 case 0x1e: /* V9 stxa */
4756 save_state(dc, cpu_cond);
4757 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4758 break;
4759 #endif
4760 default:
4761 goto illegal_insn;
4763 } else if (xop > 0x23 && xop < 0x28) {
4764 if (gen_trap_ifnofpu(dc, cpu_cond))
4765 goto jmp_insn;
4766 save_state(dc, cpu_cond);
4767 switch (xop) {
4768 case 0x24: /* stf, store fpreg */
4769 gen_address_mask(dc, cpu_addr);
4770 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4771 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4772 break;
4773 case 0x25: /* stfsr, V9 stxfsr */
4774 #ifdef TARGET_SPARC64
4775 gen_address_mask(dc, cpu_addr);
4776 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4777 if (rd == 1)
4778 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4779 else
4780 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4781 #else
4782 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4783 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4784 #endif
4785 break;
4786 case 0x26:
4787 #ifdef TARGET_SPARC64
4788 /* V9 stqf, store quad fpreg */
4790 TCGv_i32 r_const;
4792 CHECK_FPU_FEATURE(dc, FLOAT128);
4793 gen_op_load_fpr_QT0(QFPREG(rd));
4794 r_const = tcg_const_i32(dc->mem_idx);
4795 gen_helper_stqf(cpu_addr, r_const);
4796 tcg_temp_free_i32(r_const);
4798 break;
4799 #else /* !TARGET_SPARC64 */
4800 /* stdfq, store floating point queue */
4801 #if defined(CONFIG_USER_ONLY)
4802 goto illegal_insn;
4803 #else
4804 if (!supervisor(dc))
4805 goto priv_insn;
4806 if (gen_trap_ifnofpu(dc, cpu_cond))
4807 goto jmp_insn;
4808 goto nfq_insn;
4809 #endif
4810 #endif
4811 case 0x27: /* stdf, store double fpreg */
4813 TCGv_i32 r_const;
4815 gen_op_load_fpr_DT0(DFPREG(rd));
4816 r_const = tcg_const_i32(dc->mem_idx);
4817 gen_helper_stdf(cpu_addr, r_const);
4818 tcg_temp_free_i32(r_const);
4820 break;
4821 default:
4822 goto illegal_insn;
4824 } else if (xop > 0x33 && xop < 0x3f) {
4825 save_state(dc, cpu_cond);
4826 switch (xop) {
4827 #ifdef TARGET_SPARC64
4828 case 0x34: /* V9 stfa */
4829 gen_stf_asi(cpu_addr, insn, 4, rd);
4830 break;
4831 case 0x36: /* V9 stqfa */
4833 TCGv_i32 r_const;
4835 CHECK_FPU_FEATURE(dc, FLOAT128);
4836 r_const = tcg_const_i32(7);
4837 gen_helper_check_align(cpu_addr, r_const);
4838 tcg_temp_free_i32(r_const);
4839 gen_op_load_fpr_QT0(QFPREG(rd));
4840 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4842 break;
4843 case 0x37: /* V9 stdfa */
4844 gen_op_load_fpr_DT0(DFPREG(rd));
4845 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4846 break;
4847 case 0x3c: /* V9 casa */
4848 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4849 gen_movl_TN_reg(rd, cpu_val);
4850 break;
4851 case 0x3e: /* V9 casxa */
4852 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4853 gen_movl_TN_reg(rd, cpu_val);
4854 break;
4855 #else
4856 case 0x34: /* stc */
4857 case 0x35: /* stcsr */
4858 case 0x36: /* stdcq */
4859 case 0x37: /* stdc */
4860 goto ncp_insn;
4861 #endif
4862 default:
4863 goto illegal_insn;
4865 } else
4866 goto illegal_insn;
4868 break;
4870 /* default case for non jump instructions */
4871 if (dc->npc == DYNAMIC_PC) {
4872 dc->pc = DYNAMIC_PC;
4873 gen_op_next_insn();
4874 } else if (dc->npc == JUMP_PC) {
4875 /* we can do a static jump */
4876 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4877 dc->is_br = 1;
4878 } else {
4879 dc->pc = dc->npc;
4880 dc->npc = dc->npc + 4;
4882 jmp_insn:
4883 return;
4884 illegal_insn:
4886 TCGv_i32 r_const;
4888 save_state(dc, cpu_cond);
4889 r_const = tcg_const_i32(TT_ILL_INSN);
4890 gen_helper_raise_exception(r_const);
4891 tcg_temp_free_i32(r_const);
4892 dc->is_br = 1;
4894 return;
4895 unimp_flush:
4897 TCGv_i32 r_const;
4899 save_state(dc, cpu_cond);
4900 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4901 gen_helper_raise_exception(r_const);
4902 tcg_temp_free_i32(r_const);
4903 dc->is_br = 1;
4905 return;
4906 #if !defined(CONFIG_USER_ONLY)
4907 priv_insn:
4909 TCGv_i32 r_const;
4911 save_state(dc, cpu_cond);
4912 r_const = tcg_const_i32(TT_PRIV_INSN);
4913 gen_helper_raise_exception(r_const);
4914 tcg_temp_free_i32(r_const);
4915 dc->is_br = 1;
4917 return;
4918 #endif
4919 nfpu_insn:
4920 save_state(dc, cpu_cond);
4921 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4922 dc->is_br = 1;
4923 return;
4924 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4925 nfq_insn:
4926 save_state(dc, cpu_cond);
4927 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4928 dc->is_br = 1;
4929 return;
4930 #endif
4931 #ifndef TARGET_SPARC64
4932 ncp_insn:
4934 TCGv r_const;
4936 save_state(dc, cpu_cond);
4937 r_const = tcg_const_i32(TT_NCP_INSN);
4938 gen_helper_raise_exception(r_const);
4939 tcg_temp_free(r_const);
4940 dc->is_br = 1;
4942 return;
4943 #endif
/* Translate guest code starting at tb->pc into a TCG op stream.
 *
 * tb:  translation block being filled in.
 * spc: when non-zero, run in "search PC" mode: record per-instruction
 *      metadata (gen_opc_pc/npc/instr_start/icount) so a host PC inside
 *      the generated code can later be mapped back to a guest PC/NPC,
 *      instead of finalizing tb->size/tb->icount.
 * env: CPU state used for mmu index, FPU enable, breakpoints, etc.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;          /* lj: last gen_opc_* slot written (spc mode) */
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* SPARC has delayed branches: the NPC of the first insn is carried
       in cs_base rather than being implicitly pc + 4.  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PSTATE.AM forces 32-bit address masking on V9.  */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by disas_sparc_insn; freed below.  */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Emit a debug exception instead of translating an insn that
           sits on a breakpoint.  */
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                /* Zero-fill slots for ops emitted since the last insn
                   boundary, then mark this op as an insn start.  */
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Free temporaries in reverse order of allocation.  */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        /* Fell off the end of the loop without a branch: close the TB.  */
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Search-PC mode: pad out the remaining op slots and publish the
           two possible jump targets for delay-slot reconstruction.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5089 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5091 gen_intermediate_code_internal(tb, 0, env);
5094 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5096 gen_intermediate_code_internal(tb, 1, env);
/* One-time initialization of the translator: register every fixed TCG
 * global that mirrors a CPUState field (condition codes, PC/NPC, Y,
 * general registers, FP registers, and the SPARC64- or SPARC32-specific
 * control registers).  Guarded by the static `inited` flag, so repeated
 * calls after the first are no-ops.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env lives in a fixed host register; regwptr is the pointer to
           the current register window inside CPUState.  */
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* Start at 1: %g0 is hardwired to zero, so it gets no TCG global
           (gregnames[0] is NULL above).  */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5207 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5208 unsigned long searched_pc, int pc_pos, void *puc)
5210 target_ulong npc;
5211 env->pc = gen_opc_pc[pc_pos];
5212 npc = gen_opc_npc[pc_pos];
5213 if (npc == 1) {
5214 /* dynamic NPC: already stored */
5215 } else if (npc == 2) {
5216 target_ulong t2 = (target_ulong)(unsigned long)puc;
5217 /* jump PC: use T2 and the jump targets of the translation */
5218 if (t2)
5219 env->npc = gen_opc_jump_pc[0];
5220 else
5221 env->npc = gen_opc_jump_pc[1];
5222 } else {
5223 env->npc = npc;