/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"
#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
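
/* SPARC branches are delayed: the architectural state is a (PC, nPC) pair,
   and a taken branch only redirects execution after the delay-slot insn.
   The pc/npc fields above track that pair at translation time, falling back
   to the DYNAMIC_PC/JUMP_PC sentinels once the values are no longer known
   statically. */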
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
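
/* Example: GET_FIELD numbers bits from the MSB down, so GET_FIELD(insn, 0, 1)
   extracts insn[31:30] -- the SPARC "op" field -- while the _SP variants use
   the manuals' LSB-first numbering. */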
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
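
/* %g0 reads as zero and discards writes; %g1-%g7 live in the cpu_gregs TCG
   globals, while the windowed registers (%o, %l, %i) are reached through
   cpu_regwptr, which points into the current register window. */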
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
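
/* Direct TB chaining (tcg_gen_goto_tb plus a non-zero tcg_gen_exit_tb value)
   is only safe when the destination lies on the same guest page as this TB,
   so that invalidating the page also unlinks the patched jump; cross-page
   and single-stepped exits fall back to tcg_gen_exit_tb(0). */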
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
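
/* gen_op_mulscc below implements one step of the V8 iterative multiply:
   the low bit of %y selects whether rs2 is added in, %y shifts right
   taking the low bit of rs1, and rs1 shifts right with (N ^ V) inserted
   at bit 31 before the conditional add that sets the flags for the next
   step. */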
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
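
/* The only signed 64-bit division that overflows is INT64_MIN / -1; the
   branches below pin that case's result to INT64_MIN instead of issuing
   the host division (which would trap on many hosts). */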
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
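
/* Conditions are evaluated against the architectural PSR/XCC flags, so
   gen_cond below first materializes any lazily-tracked condition codes
   (dc->cc_op != CC_OP_FLAGS) with gen_helper_compute_psr(). */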
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
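
/* In do_branch/do_fbranch below, the annul bit means: "bn,a" skips the
   delay slot entirely, "ba,a" executes the target without the delay slot,
   and an annulled conditional branch executes its delay slot only when
   the branch is taken. */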
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
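/* On V9 the ASI for an alternate-space access comes from the %asi register
   when the immediate bit is set, otherwise from the instruction's asi
   field; either way the access is performed by the ld_asi/st_asi helpers. */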
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
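
/* get_src1/get_src2 return either the caller's scratch temporary `def`
   (for %g0, immediates and windowed registers) or a cpu_gregs global
   directly, so callers must treat the returned TCGv as read-only. */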
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
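
/* Decode dispatches on the top two bits of the insn word (the "op" field):
   0 = branches/SETHI, 1 = CALL, 2 = arithmetic/logic/control, 3 = memory. */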
1878 /* before an instruction, dc->pc must be static */
1879 static void disas_sparc_insn(DisasContext * dc)
1881 unsigned int insn, opc, rs1, rs2, rd;
1882 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1883 target_long simm;
1885 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1886 tcg_gen_debug_insn_start(dc->pc);
1887 insn = ldl_code(dc->pc);
1888 opc = GET_FIELD(insn, 0, 1);
1890 rd = GET_FIELD(insn, 2, 6);
1892 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1893 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1895 switch (opc) {
1896 case 0: /* branches/sethi */
1898 unsigned int xop = GET_FIELD(insn, 7, 9);
1899 int32_t target;
1900 switch (xop) {
1901 #ifdef TARGET_SPARC64
1902 case 0x1: /* V9 BPcc */
1904 int cc;
1906 target = GET_FIELD_SP(insn, 0, 18);
1907 target = sign_extend(target, 19);
1908 target <<= 2;
1909 cc = GET_FIELD_SP(insn, 20, 21);
1910 if (cc == 0)
1911 do_branch(dc, target, insn, 0, cpu_cond);
1912 else if (cc == 2)
1913 do_branch(dc, target, insn, 1, cpu_cond);
1914 else
1915 goto illegal_insn;
1916 goto jmp_insn;
1918 case 0x3: /* V9 BPr */
1920 target = GET_FIELD_SP(insn, 0, 13) |
1921 (GET_FIELD_SP(insn, 20, 21) << 14);
1922 target = sign_extend(target, 16);
1923 target <<= 2;
1924 cpu_src1 = get_src1(insn, cpu_src1);
1925 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1926 goto jmp_insn;
1928 case 0x5: /* V9 FBPcc */
1930 int cc = GET_FIELD_SP(insn, 20, 21);
1931 if (gen_trap_ifnofpu(dc, cpu_cond))
1932 goto jmp_insn;
1933 target = GET_FIELD_SP(insn, 0, 18);
1934 target = sign_extend(target, 19);
1935 target <<= 2;
1936 do_fbranch(dc, target, insn, cc, cpu_cond);
1937 goto jmp_insn;
1939 #else
1940 case 0x7: /* CBN+x */
1942 goto ncp_insn;
1944 #endif
1945 case 0x2: /* BN+x */
1947 target = GET_FIELD(insn, 10, 31);
1948 target = sign_extend(target, 22);
1949 target <<= 2;
1950 do_branch(dc, target, insn, 0, cpu_cond);
1951 goto jmp_insn;
1953 case 0x6: /* FBN+x */
1955 if (gen_trap_ifnofpu(dc, cpu_cond))
1956 goto jmp_insn;
1957 target = GET_FIELD(insn, 10, 31);
1958 target = sign_extend(target, 22);
1959 target <<= 2;
1960 do_fbranch(dc, target, insn, 0, cpu_cond);
1961 goto jmp_insn;
1963 case 0x4: /* SETHI */
1964 if (rd) { // nop
1965 uint32_t value = GET_FIELD(insn, 10, 31);
1966 TCGv r_const;
1968 r_const = tcg_const_tl(value << 10);
1969 gen_movl_TN_reg(rd, r_const);
1970 tcg_temp_free(r_const);
1972 break;
1973 case 0x0: /* UNIMPL */
1974 default:
1975 goto illegal_insn;
1977 break;
1979 break;
1980 case 1: /*CALL*/
1982 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1983 TCGv r_const;
1985 r_const = tcg_const_tl(dc->pc);
1986 gen_movl_TN_reg(15, r_const);
1987 tcg_temp_free(r_const);
1988 target += dc->pc;
1989 gen_mov_pc_npc(dc, cpu_cond);
1990 dc->npc = target;
1992 goto jmp_insn;
1993 case 2: /* FPU & Logical Operations */
1995 unsigned int xop = GET_FIELD(insn, 7, 12);
1996 if (xop == 0x3a) { /* generate trap */
1997 int cond;
1999 cpu_src1 = get_src1(insn, cpu_src1);
2000 if (IS_IMM) {
2001 rs2 = GET_FIELD(insn, 25, 31);
2002 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2003 } else {
2004 rs2 = GET_FIELD(insn, 27, 31);
2005 if (rs2 != 0) {
2006 gen_movl_reg_TN(rs2, cpu_src2);
2007 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2008 } else
2009 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2012 cond = GET_FIELD(insn, 3, 6);
2013 if (cond == 0x8) { /* Trap Always */
2014 save_state(dc, cpu_cond);
2015 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2016 supervisor(dc))
2017 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2018 else
2019 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2020 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2021 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2023 if (rs2 == 0 &&
2024 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2026 gen_helper_shutdown();
2028 } else {
2029 gen_helper_raise_exception(cpu_tmp32);
2031 } else if (cond != 0) {
2032 TCGv r_cond = tcg_temp_new();
2033 int l1;
2034 #ifdef TARGET_SPARC64
2035 /* V9 icc/xcc */
2036 int cc = GET_FIELD_SP(insn, 11, 12);
2038 save_state(dc, cpu_cond);
2039 if (cc == 0)
2040 gen_cond(r_cond, 0, cond, dc);
2041 else if (cc == 2)
2042 gen_cond(r_cond, 1, cond, dc);
2043 else
2044 goto illegal_insn;
2045 #else
2046 save_state(dc, cpu_cond);
2047 gen_cond(r_cond, 0, cond, dc);
2048 #endif
2049 l1 = gen_new_label();
2050 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2052 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2053 supervisor(dc))
2054 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2055 else
2056 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2057 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2058 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2059 gen_helper_raise_exception(cpu_tmp32);
2061 gen_set_label(l1);
2062 tcg_temp_free(r_cond);
2064 gen_op_next_insn();
2065 tcg_gen_exit_tb(0);
2066 dc->is_br = 1;
2067 goto jmp_insn;
2068 } else if (xop == 0x28) {
2069 rs1 = GET_FIELD(insn, 13, 17);
2070 switch(rs1) {
2071 case 0: /* rdy */
2072 #ifndef TARGET_SPARC64
2073 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2074 manual, rdy on the microSPARC
2075 II */
2076 case 0x0f: /* stbar in the SPARCv8 manual,
2077 rdy on the microSPARC II */
2078 case 0x10 ... 0x1f: /* implementation-dependent in the
2079 SPARCv8 manual, rdy on the
2080 microSPARC II */
2081 /* Read Asr17 */
2082 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2083 TCGv r_const;
2085 /* Read Asr17 for a Leon3 monoprocessor */
2086 r_const = tcg_const_tl((1 << 8)
2087 | (dc->def->nwindows - 1));
2088 gen_movl_TN_reg(rd, r_const);
2089 tcg_temp_free(r_const);
2090 break;
2092 #endif
2093 gen_movl_TN_reg(rd, cpu_y);
2094 break;
2095 #ifdef TARGET_SPARC64
2096 case 0x2: /* V9 rdccr */
2097 gen_helper_compute_psr();
2098 gen_helper_rdccr(cpu_dst);
2099 gen_movl_TN_reg(rd, cpu_dst);
2100 break;
2101 case 0x3: /* V9 rdasi */
2102 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2103 gen_movl_TN_reg(rd, cpu_dst);
2104 break;
2105 case 0x4: /* V9 rdtick */
2107 TCGv_ptr r_tickptr;
2109 r_tickptr = tcg_temp_new_ptr();
2110 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2111 offsetof(CPUState, tick));
2112 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2113 tcg_temp_free_ptr(r_tickptr);
2114 gen_movl_TN_reg(rd, cpu_dst);
2116 break;
2117 case 0x5: /* V9 rdpc */
2119 TCGv r_const;
2121 r_const = tcg_const_tl(dc->pc);
2122 gen_movl_TN_reg(rd, r_const);
2123 tcg_temp_free(r_const);
2125 break;
2126 case 0x6: /* V9 rdfprs */
2127 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2128 gen_movl_TN_reg(rd, cpu_dst);
2129 break;
2130 case 0xf: /* V9 membar */
2131 break; /* no effect */
2132 case 0x13: /* Graphics Status */
2133 if (gen_trap_ifnofpu(dc, cpu_cond))
2134 goto jmp_insn;
2135 gen_movl_TN_reg(rd, cpu_gsr);
2136 break;
2137 case 0x16: /* Softint */
2138 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2139 gen_movl_TN_reg(rd, cpu_dst);
2140 break;
2141 case 0x17: /* Tick compare */
2142 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2143 break;
2144 case 0x18: /* System tick */
2146 TCGv_ptr r_tickptr;
2148 r_tickptr = tcg_temp_new_ptr();
2149 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2150 offsetof(CPUState, stick));
2151 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2152 tcg_temp_free_ptr(r_tickptr);
2153 gen_movl_TN_reg(rd, cpu_dst);
2155 break;
2156 case 0x19: /* System tick compare */
2157 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2158 break;
2159 case 0x10: /* Performance Control */
2160 case 0x11: /* Performance Instrumentation Counter */
2161 case 0x12: /* Dispatch Control */
2162 case 0x14: /* Softint set, WO */
2163 case 0x15: /* Softint clear, WO */
2164 #endif
2165 default:
2166 goto illegal_insn;
2168 #if !defined(CONFIG_USER_ONLY)
2169 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2170 #ifndef TARGET_SPARC64
2171 if (!supervisor(dc))
2172 goto priv_insn;
2173 gen_helper_compute_psr();
2174 dc->cc_op = CC_OP_FLAGS;
2175 gen_helper_rdpsr(cpu_dst);
2176 #else
2177 CHECK_IU_FEATURE(dc, HYPV);
2178 if (!hypervisor(dc))
2179 goto priv_insn;
2180 rs1 = GET_FIELD(insn, 13, 17);
2181 switch (rs1) {
2182 case 0: // hpstate
2183 // gen_op_rdhpstate();
2184 break;
2185 case 1: // htstate
2186 // gen_op_rdhtstate();
2187 break;
2188 case 3: // hintp
2189 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2190 break;
2191 case 5: // htba
2192 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2193 break;
2194 case 6: // hver
2195 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2196 break;
2197 case 31: // hstick_cmpr
2198 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2199 break;
2200 default:
2201 goto illegal_insn;
2203 #endif
2204 gen_movl_TN_reg(rd, cpu_dst);
2205 break;
2206 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2207 if (!supervisor(dc))
2208 goto priv_insn;
2209 #ifdef TARGET_SPARC64
2210 rs1 = GET_FIELD(insn, 13, 17);
2211 switch (rs1) {
2212 case 0: // tpc
2213 {
2214 TCGv_ptr r_tsptr;
2216 r_tsptr = tcg_temp_new_ptr();
2217 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2218 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2219 offsetof(trap_state, tpc));
2220 tcg_temp_free_ptr(r_tsptr);
2221 }
2222 break;
2223 case 1: // tnpc
2224 {
2225 TCGv_ptr r_tsptr;
2227 r_tsptr = tcg_temp_new_ptr();
2228 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2229 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2230 offsetof(trap_state, tnpc));
2231 tcg_temp_free_ptr(r_tsptr);
2232 }
2233 break;
2234 case 2: // tstate
2235 {
2236 TCGv_ptr r_tsptr;
2238 r_tsptr = tcg_temp_new_ptr();
2239 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2240 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2241 offsetof(trap_state, tstate));
2242 tcg_temp_free_ptr(r_tsptr);
2243 }
2244 break;
2245 case 3: // tt
2246 {
2247 TCGv_ptr r_tsptr;
2249 r_tsptr = tcg_temp_new_ptr();
2250 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2251 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2252 offsetof(trap_state, tt));
2253 tcg_temp_free_ptr(r_tsptr);
2254 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2255 }
2256 break;
2257 case 4: // tick
2258 {
2259 TCGv_ptr r_tickptr;
2261 r_tickptr = tcg_temp_new_ptr();
2262 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2263 offsetof(CPUState, tick));
2264 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2265 gen_movl_TN_reg(rd, cpu_tmp0);
2266 tcg_temp_free_ptr(r_tickptr);
2267 }
2268 break;
2269 case 5: // tba
2270 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2271 break;
2272 case 6: // pstate
2273 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2274 offsetof(CPUSPARCState, pstate));
2275 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2276 break;
2277 case 7: // tl
2278 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2279 offsetof(CPUSPARCState, tl));
2280 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2281 break;
2282 case 8: // pil
2283 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2284 offsetof(CPUSPARCState, psrpil));
2285 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2286 break;
2287 case 9: // cwp
2288 gen_helper_rdcwp(cpu_tmp0);
2289 break;
2290 case 10: // cansave
2291 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2292 offsetof(CPUSPARCState, cansave));
2293 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2294 break;
2295 case 11: // canrestore
2296 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2297 offsetof(CPUSPARCState, canrestore));
2298 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2299 break;
2300 case 12: // cleanwin
2301 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2302 offsetof(CPUSPARCState, cleanwin));
2303 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2304 break;
2305 case 13: // otherwin
2306 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2307 offsetof(CPUSPARCState, otherwin));
2308 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2309 break;
2310 case 14: // wstate
2311 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2312 offsetof(CPUSPARCState, wstate));
2313 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2314 break;
2315 case 16: // UA2005 gl
2316 CHECK_IU_FEATURE(dc, GL);
2317 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2318 offsetof(CPUSPARCState, gl));
2319 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2320 break;
2321 case 26: // UA2005 strand status
2322 CHECK_IU_FEATURE(dc, HYPV);
2323 if (!hypervisor(dc))
2324 goto priv_insn;
2325 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2326 break;
2327 case 31: // ver
2328 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2329 break;
2330 case 15: // fq
2331 default:
2332 goto illegal_insn;
2333 }
2334 #else
2335 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2336 #endif
2337 gen_movl_TN_reg(rd, cpu_tmp0);
2338 break;
2339 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2340 #ifdef TARGET_SPARC64
2341 save_state(dc, cpu_cond);
2342 gen_helper_flushw();
2343 #else
2344 if (!supervisor(dc))
2345 goto priv_insn;
2346 gen_movl_TN_reg(rd, cpu_tbr);
2347 #endif
2348 break;
2349 #endif
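/* FPop1 (xop 0x34): single-precision operands live directly in cpu_fpr[];
   double and quad operands are staged through the DT0/DT1 and QT0/QT1
   temporaries in the CPU state, which the helpers operate on implicitly. */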
2350 } else if (xop == 0x34) { /* FPU Operations */
2351 if (gen_trap_ifnofpu(dc, cpu_cond))
2352 goto jmp_insn;
2353 gen_op_clear_ieee_excp_and_FTT();
2354 rs1 = GET_FIELD(insn, 13, 17);
2355 rs2 = GET_FIELD(insn, 27, 31);
2356 xop = GET_FIELD(insn, 18, 26);
2357 save_state(dc, cpu_cond);
2358 switch (xop) {
2359 case 0x1: /* fmovs */
2360 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2361 gen_update_fprs_dirty(rd);
2362 break;
2363 case 0x5: /* fnegs */
2364 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2365 gen_update_fprs_dirty(rd);
2366 break;
2367 case 0x9: /* fabss */
2368 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2369 gen_update_fprs_dirty(rd);
2370 break;
2371 case 0x29: /* fsqrts */
2372 CHECK_FPU_FEATURE(dc, FSQRT);
2373 gen_clear_float_exceptions();
2374 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2375 gen_helper_check_ieee_exceptions();
2376 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2377 gen_update_fprs_dirty(rd);
2378 break;
2379 case 0x2a: /* fsqrtd */
2380 CHECK_FPU_FEATURE(dc, FSQRT);
2381 gen_op_load_fpr_DT1(DFPREG(rs2));
2382 gen_clear_float_exceptions();
2383 gen_helper_fsqrtd();
2384 gen_helper_check_ieee_exceptions();
2385 gen_op_store_DT0_fpr(DFPREG(rd));
2386 gen_update_fprs_dirty(DFPREG(rd));
2387 break;
2388 case 0x2b: /* fsqrtq */
2389 CHECK_FPU_FEATURE(dc, FLOAT128);
2390 gen_op_load_fpr_QT1(QFPREG(rs2));
2391 gen_clear_float_exceptions();
2392 gen_helper_fsqrtq();
2393 gen_helper_check_ieee_exceptions();
2394 gen_op_store_QT0_fpr(QFPREG(rd));
2395 gen_update_fprs_dirty(QFPREG(rd));
2396 break;
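/* The arithmetic ops below all follow one pattern: clear the accrued
   exception flags, run the softfloat helper, then check for and raise
   any IEEE exceptions before the result is committed. */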
2397 case 0x41: /* fadds */
2398 gen_clear_float_exceptions();
2399 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2400 gen_helper_check_ieee_exceptions();
2401 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2402 gen_update_fprs_dirty(rd);
2403 break;
2404 case 0x42: /* faddd */
2405 gen_op_load_fpr_DT0(DFPREG(rs1));
2406 gen_op_load_fpr_DT1(DFPREG(rs2));
2407 gen_clear_float_exceptions();
2408 gen_helper_faddd();
2409 gen_helper_check_ieee_exceptions();
2410 gen_op_store_DT0_fpr(DFPREG(rd));
2411 gen_update_fprs_dirty(DFPREG(rd));
2412 break;
2413 case 0x43: /* faddq */
2414 CHECK_FPU_FEATURE(dc, FLOAT128);
2415 gen_op_load_fpr_QT0(QFPREG(rs1));
2416 gen_op_load_fpr_QT1(QFPREG(rs2));
2417 gen_clear_float_exceptions();
2418 gen_helper_faddq();
2419 gen_helper_check_ieee_exceptions();
2420 gen_op_store_QT0_fpr(QFPREG(rd));
2421 gen_update_fprs_dirty(QFPREG(rd));
2422 break;
2423 case 0x45: /* fsubs */
2424 gen_clear_float_exceptions();
2425 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2426 gen_helper_check_ieee_exceptions();
2427 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2428 gen_update_fprs_dirty(rd);
2429 break;
2430 case 0x46: /* fsubd */
2431 gen_op_load_fpr_DT0(DFPREG(rs1));
2432 gen_op_load_fpr_DT1(DFPREG(rs2));
2433 gen_clear_float_exceptions();
2434 gen_helper_fsubd();
2435 gen_helper_check_ieee_exceptions();
2436 gen_op_store_DT0_fpr(DFPREG(rd));
2437 gen_update_fprs_dirty(DFPREG(rd));
2438 break;
2439 case 0x47: /* fsubq */
2440 CHECK_FPU_FEATURE(dc, FLOAT128);
2441 gen_op_load_fpr_QT0(QFPREG(rs1));
2442 gen_op_load_fpr_QT1(QFPREG(rs2));
2443 gen_clear_float_exceptions();
2444 gen_helper_fsubq();
2445 gen_helper_check_ieee_exceptions();
2446 gen_op_store_QT0_fpr(QFPREG(rd));
2447 gen_update_fprs_dirty(QFPREG(rd));
2448 break;
2449 case 0x49: /* fmuls */
2450 CHECK_FPU_FEATURE(dc, FMUL);
2451 gen_clear_float_exceptions();
2452 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2453 gen_helper_check_ieee_exceptions();
2454 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2455 gen_update_fprs_dirty(rd);
2456 break;
2457 case 0x4a: /* fmuld */
2458 CHECK_FPU_FEATURE(dc, FMUL);
2459 gen_op_load_fpr_DT0(DFPREG(rs1));
2460 gen_op_load_fpr_DT1(DFPREG(rs2));
2461 gen_clear_float_exceptions();
2462 gen_helper_fmuld();
2463 gen_helper_check_ieee_exceptions();
2464 gen_op_store_DT0_fpr(DFPREG(rd));
2465 gen_update_fprs_dirty(DFPREG(rd));
2466 break;
2467 case 0x4b: /* fmulq */
2468 CHECK_FPU_FEATURE(dc, FLOAT128);
2469 CHECK_FPU_FEATURE(dc, FMUL);
2470 gen_op_load_fpr_QT0(QFPREG(rs1));
2471 gen_op_load_fpr_QT1(QFPREG(rs2));
2472 gen_clear_float_exceptions();
2473 gen_helper_fmulq();
2474 gen_helper_check_ieee_exceptions();
2475 gen_op_store_QT0_fpr(QFPREG(rd));
2476 gen_update_fprs_dirty(QFPREG(rd));
2477 break;
2478 case 0x4d: /* fdivs */
2479 gen_clear_float_exceptions();
2480 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2481 gen_helper_check_ieee_exceptions();
2482 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2483 gen_update_fprs_dirty(rd);
2484 break;
2485 case 0x4e: /* fdivd */
2486 gen_op_load_fpr_DT0(DFPREG(rs1));
2487 gen_op_load_fpr_DT1(DFPREG(rs2));
2488 gen_clear_float_exceptions();
2489 gen_helper_fdivd();
2490 gen_helper_check_ieee_exceptions();
2491 gen_op_store_DT0_fpr(DFPREG(rd));
2492 gen_update_fprs_dirty(DFPREG(rd));
2493 break;
2494 case 0x4f: /* fdivq */
2495 CHECK_FPU_FEATURE(dc, FLOAT128);
2496 gen_op_load_fpr_QT0(QFPREG(rs1));
2497 gen_op_load_fpr_QT1(QFPREG(rs2));
2498 gen_clear_float_exceptions();
2499 gen_helper_fdivq();
2500 gen_helper_check_ieee_exceptions();
2501 gen_op_store_QT0_fpr(QFPREG(rd));
2502 gen_update_fprs_dirty(QFPREG(rd));
2503 break;
2504 case 0x69: /* fsmuld */
2505 CHECK_FPU_FEATURE(dc, FSMULD);
2506 gen_clear_float_exceptions();
2507 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2508 gen_helper_check_ieee_exceptions();
2509 gen_op_store_DT0_fpr(DFPREG(rd));
2510 gen_update_fprs_dirty(DFPREG(rd));
2511 break;
2512 case 0x6e: /* fdmulq */
2513 CHECK_FPU_FEATURE(dc, FLOAT128);
2514 gen_op_load_fpr_DT0(DFPREG(rs1));
2515 gen_op_load_fpr_DT1(DFPREG(rs2));
2516 gen_clear_float_exceptions();
2517 gen_helper_fdmulq();
2518 gen_helper_check_ieee_exceptions();
2519 gen_op_store_QT0_fpr(QFPREG(rd));
2520 gen_update_fprs_dirty(QFPREG(rd));
2521 break;
2522 case 0xc4: /* fitos */
2523 gen_clear_float_exceptions();
2524 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2525 gen_helper_check_ieee_exceptions();
2526 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2527 gen_update_fprs_dirty(rd);
2528 break;
2529 case 0xc6: /* fdtos */
2530 gen_op_load_fpr_DT1(DFPREG(rs2));
2531 gen_clear_float_exceptions();
2532 gen_helper_fdtos(cpu_tmp32);
2533 gen_helper_check_ieee_exceptions();
2534 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2535 gen_update_fprs_dirty(rd);
2536 break;
2537 case 0xc7: /* fqtos */
2538 CHECK_FPU_FEATURE(dc, FLOAT128);
2539 gen_op_load_fpr_QT1(QFPREG(rs2));
2540 gen_clear_float_exceptions();
2541 gen_helper_fqtos(cpu_tmp32);
2542 gen_helper_check_ieee_exceptions();
2543 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2544 gen_update_fprs_dirty(rd);
2545 break;
2546 case 0xc8: /* fitod */
2547 gen_helper_fitod(cpu_fpr[rs2]);
2548 gen_op_store_DT0_fpr(DFPREG(rd));
2549 gen_update_fprs_dirty(DFPREG(rd));
2550 break;
2551 case 0xc9: /* fstod */
2552 gen_helper_fstod(cpu_fpr[rs2]);
2553 gen_op_store_DT0_fpr(DFPREG(rd));
2554 gen_update_fprs_dirty(DFPREG(rd));
2555 break;
2556 case 0xcb: /* fqtod */
2557 CHECK_FPU_FEATURE(dc, FLOAT128);
2558 gen_op_load_fpr_QT1(QFPREG(rs2));
2559 gen_clear_float_exceptions();
2560 gen_helper_fqtod();
2561 gen_helper_check_ieee_exceptions();
2562 gen_op_store_DT0_fpr(DFPREG(rd));
2563 gen_update_fprs_dirty(DFPREG(rd));
2564 break;
2565 case 0xcc: /* fitoq */
2566 CHECK_FPU_FEATURE(dc, FLOAT128);
2567 gen_helper_fitoq(cpu_fpr[rs2]);
2568 gen_op_store_QT0_fpr(QFPREG(rd));
2569 gen_update_fprs_dirty(QFPREG(rd));
2570 break;
2571 case 0xcd: /* fstoq */
2572 CHECK_FPU_FEATURE(dc, FLOAT128);
2573 gen_helper_fstoq(cpu_fpr[rs2]);
2574 gen_op_store_QT0_fpr(QFPREG(rd));
2575 gen_update_fprs_dirty(QFPREG(rd));
2576 break;
2577 case 0xce: /* fdtoq */
2578 CHECK_FPU_FEATURE(dc, FLOAT128);
2579 gen_op_load_fpr_DT1(DFPREG(rs2));
2580 gen_helper_fdtoq();
2581 gen_op_store_QT0_fpr(QFPREG(rd));
2582 gen_update_fprs_dirty(QFPREG(rd));
2583 break;
2584 case 0xd1: /* fstoi */
2585 gen_clear_float_exceptions();
2586 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2587 gen_helper_check_ieee_exceptions();
2588 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2589 gen_update_fprs_dirty(rd);
2590 break;
2591 case 0xd2: /* fdtoi */
2592 gen_op_load_fpr_DT1(DFPREG(rs2));
2593 gen_clear_float_exceptions();
2594 gen_helper_fdtoi(cpu_tmp32);
2595 gen_helper_check_ieee_exceptions();
2596 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2597 gen_update_fprs_dirty(rd);
2598 break;
2599 case 0xd3: /* fqtoi */
2600 CHECK_FPU_FEATURE(dc, FLOAT128);
2601 gen_op_load_fpr_QT1(QFPREG(rs2));
2602 gen_clear_float_exceptions();
2603 gen_helper_fqtoi(cpu_tmp32);
2604 gen_helper_check_ieee_exceptions();
2605 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2606 gen_update_fprs_dirty(rd);
2607 break;
2608 #ifdef TARGET_SPARC64
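/* The V9 moves below copy raw 32-bit register halves (a pair for double,
   four for quad); pure moves raise no FP exceptions, so no exception
   bookkeeping is generated for them. */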
2609 case 0x2: /* V9 fmovd */
2610 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2611 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2612 cpu_fpr[DFPREG(rs2) + 1]);
2613 gen_update_fprs_dirty(DFPREG(rd));
2614 break;
2615 case 0x3: /* V9 fmovq */
2616 CHECK_FPU_FEATURE(dc, FLOAT128);
2617 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2618 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2619 cpu_fpr[QFPREG(rs2) + 1]);
2620 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2621 cpu_fpr[QFPREG(rs2) + 2]);
2622 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2623 cpu_fpr[QFPREG(rs2) + 3]);
2624 gen_update_fprs_dirty(QFPREG(rd));
2625 break;
2626 case 0x6: /* V9 fnegd */
2627 gen_op_load_fpr_DT1(DFPREG(rs2));
2628 gen_helper_fnegd();
2629 gen_op_store_DT0_fpr(DFPREG(rd));
2630 gen_update_fprs_dirty(DFPREG(rd));
2631 break;
2632 case 0x7: /* V9 fnegq */
2633 CHECK_FPU_FEATURE(dc, FLOAT128);
2634 gen_op_load_fpr_QT1(QFPREG(rs2));
2635 gen_helper_fnegq();
2636 gen_op_store_QT0_fpr(QFPREG(rd));
2637 gen_update_fprs_dirty(QFPREG(rd));
2638 break;
2639 case 0xa: /* V9 fabsd */
2640 gen_op_load_fpr_DT1(DFPREG(rs2));
2641 gen_helper_fabsd();
2642 gen_op_store_DT0_fpr(DFPREG(rd));
2643 gen_update_fprs_dirty(DFPREG(rd));
2644 break;
2645 case 0xb: /* V9 fabsq */
2646 CHECK_FPU_FEATURE(dc, FLOAT128);
2647 gen_op_load_fpr_QT1(QFPREG(rs2));
2648 gen_helper_fabsq();
2649 gen_op_store_QT0_fpr(QFPREG(rd));
2650 gen_update_fprs_dirty(QFPREG(rd));
2651 break;
2652 case 0x81: /* V9 fstox */
2653 gen_clear_float_exceptions();
2654 gen_helper_fstox(cpu_fpr[rs2]);
2655 gen_helper_check_ieee_exceptions();
2656 gen_op_store_DT0_fpr(DFPREG(rd));
2657 gen_update_fprs_dirty(DFPREG(rd));
2658 break;
2659 case 0x82: /* V9 fdtox */
2660 gen_op_load_fpr_DT1(DFPREG(rs2));
2661 gen_clear_float_exceptions();
2662 gen_helper_fdtox();
2663 gen_helper_check_ieee_exceptions();
2664 gen_op_store_DT0_fpr(DFPREG(rd));
2665 gen_update_fprs_dirty(DFPREG(rd));
2666 break;
2667 case 0x83: /* V9 fqtox */
2668 CHECK_FPU_FEATURE(dc, FLOAT128);
2669 gen_op_load_fpr_QT1(QFPREG(rs2));
2670 gen_clear_float_exceptions();
2671 gen_helper_fqtox();
2672 gen_helper_check_ieee_exceptions();
2673 gen_op_store_DT0_fpr(DFPREG(rd));
2674 gen_update_fprs_dirty(DFPREG(rd));
2675 break;
2676 case 0x84: /* V9 fxtos */
2677 gen_op_load_fpr_DT1(DFPREG(rs2));
2678 gen_clear_float_exceptions();
2679 gen_helper_fxtos(cpu_tmp32);
2680 gen_helper_check_ieee_exceptions();
2681 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2682 gen_update_fprs_dirty(rd);
2683 break;
2684 case 0x88: /* V9 fxtod */
2685 gen_op_load_fpr_DT1(DFPREG(rs2));
2686 gen_clear_float_exceptions();
2687 gen_helper_fxtod();
2688 gen_helper_check_ieee_exceptions();
2689 gen_op_store_DT0_fpr(DFPREG(rd));
2690 gen_update_fprs_dirty(DFPREG(rd));
2691 break;
2692 case 0x8c: /* V9 fxtoq */
2693 CHECK_FPU_FEATURE(dc, FLOAT128);
2694 gen_op_load_fpr_DT1(DFPREG(rs2));
2695 gen_clear_float_exceptions();
2696 gen_helper_fxtoq();
2697 gen_helper_check_ieee_exceptions();
2698 gen_op_store_QT0_fpr(QFPREG(rd));
2699 gen_update_fprs_dirty(QFPREG(rd));
2700 break;
2701 #endif
2702 default:
2703 goto illegal_insn;
2704 }
2705 } else if (xop == 0x35) { /* FPU Operations */
2706 #ifdef TARGET_SPARC64
2707 int cond;
2708 #endif
2709 if (gen_trap_ifnofpu(dc, cpu_cond))
2710 goto jmp_insn;
2711 gen_op_clear_ieee_excp_and_FTT();
2712 rs1 = GET_FIELD(insn, 13, 17);
2713 rs2 = GET_FIELD(insn, 27, 31);
2714 xop = GET_FIELD(insn, 18, 26);
2715 save_state(dc, cpu_cond);
2716 #ifdef TARGET_SPARC64
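/* fmovrs/fmovrd/fmovrq: move an FP register only when integer register
   rs1 satisfies the register condition, implemented as a conditional
   branch over the move. */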
2717 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2718 int l1;
2720 l1 = gen_new_label();
2721 cond = GET_FIELD_SP(insn, 14, 17);
2722 cpu_src1 = get_src1(insn, cpu_src1);
2723 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2724 0, l1);
2725 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2726 gen_update_fprs_dirty(rd);
2727 gen_set_label(l1);
2728 break;
2729 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2730 int l1;
2732 l1 = gen_new_label();
2733 cond = GET_FIELD_SP(insn, 14, 17);
2734 cpu_src1 = get_src1(insn, cpu_src1);
2735 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2736 0, l1);
2737 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2738 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2739 gen_update_fprs_dirty(DFPREG(rd));
2740 gen_set_label(l1);
2741 break;
2742 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2743 int l1;
2745 CHECK_FPU_FEATURE(dc, FLOAT128);
2746 l1 = gen_new_label();
2747 cond = GET_FIELD_SP(insn, 14, 17);
2748 cpu_src1 = get_src1(insn, cpu_src1);
2749 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2750 0, l1);
2751 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2752 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2753 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2754 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2755 gen_update_fprs_dirty(QFPREG(rd));
2756 gen_set_label(l1);
2757 break;
2758 }
2759 #endif
2760 switch (xop) {
2761 #ifdef TARGET_SPARC64
2762 #define FMOVSCC(fcc) \
2763 { \
2764 TCGv r_cond; \
2765 int l1; \
2767 l1 = gen_new_label(); \
2768 r_cond = tcg_temp_new(); \
2769 cond = GET_FIELD_SP(insn, 14, 17); \
2770 gen_fcond(r_cond, fcc, cond); \
2771 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2772 0, l1); \
2773 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2774 gen_update_fprs_dirty(rd); \
2775 gen_set_label(l1); \
2776 tcg_temp_free(r_cond); \
2777 }
2778 #define FMOVDCC(fcc) \
2779 { \
2780 TCGv r_cond; \
2781 int l1; \
2783 l1 = gen_new_label(); \
2784 r_cond = tcg_temp_new(); \
2785 cond = GET_FIELD_SP(insn, 14, 17); \
2786 gen_fcond(r_cond, fcc, cond); \
2787 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2788 0, l1); \
2789 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2790 cpu_fpr[DFPREG(rs2)]); \
2791 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2792 cpu_fpr[DFPREG(rs2) + 1]); \
2793 gen_update_fprs_dirty(DFPREG(rd)); \
2794 gen_set_label(l1); \
2795 tcg_temp_free(r_cond); \
2796 }
2797 #define FMOVQCC(fcc) \
2798 { \
2799 TCGv r_cond; \
2800 int l1; \
2802 l1 = gen_new_label(); \
2803 r_cond = tcg_temp_new(); \
2804 cond = GET_FIELD_SP(insn, 14, 17); \
2805 gen_fcond(r_cond, fcc, cond); \
2806 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2807 0, l1); \
2808 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2809 cpu_fpr[QFPREG(rs2)]); \
2810 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2811 cpu_fpr[QFPREG(rs2) + 1]); \
2812 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2813 cpu_fpr[QFPREG(rs2) + 2]); \
2814 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2815 cpu_fpr[QFPREG(rs2) + 3]); \
2816 gen_update_fprs_dirty(QFPREG(rd)); \
2817 gen_set_label(l1); \
2818 tcg_temp_free(r_cond); \
2819 }
2820 case 0x001: /* V9 fmovscc %fcc0 */
2821 FMOVSCC(0);
2822 break;
2823 case 0x002: /* V9 fmovdcc %fcc0 */
2824 FMOVDCC(0);
2825 break;
2826 case 0x003: /* V9 fmovqcc %fcc0 */
2827 CHECK_FPU_FEATURE(dc, FLOAT128);
2828 FMOVQCC(0);
2829 break;
2830 case 0x041: /* V9 fmovscc %fcc1 */
2831 FMOVSCC(1);
2832 break;
2833 case 0x042: /* V9 fmovdcc %fcc1 */
2834 FMOVDCC(1);
2835 break;
2836 case 0x043: /* V9 fmovqcc %fcc1 */
2837 CHECK_FPU_FEATURE(dc, FLOAT128);
2838 FMOVQCC(1);
2839 break;
2840 case 0x081: /* V9 fmovscc %fcc2 */
2841 FMOVSCC(2);
2842 break;
2843 case 0x082: /* V9 fmovdcc %fcc2 */
2844 FMOVDCC(2);
2845 break;
2846 case 0x083: /* V9 fmovqcc %fcc2 */
2847 CHECK_FPU_FEATURE(dc, FLOAT128);
2848 FMOVQCC(2);
2849 break;
2850 case 0x0c1: /* V9 fmovscc %fcc3 */
2851 FMOVSCC(3);
2852 break;
2853 case 0x0c2: /* V9 fmovdcc %fcc3 */
2854 FMOVDCC(3);
2855 break;
2856 case 0x0c3: /* V9 fmovqcc %fcc3 */
2857 CHECK_FPU_FEATURE(dc, FLOAT128);
2858 FMOVQCC(3);
2859 break;
2860 #undef FMOVSCC
2861 #undef FMOVDCC
2862 #undef FMOVQCC
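/* The same three helpers are now redefined keyed on the integer condition
   codes: gen_cond with 0 tests %icc, with 1 tests %xcc. */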
2863 #define FMOVSCC(icc) \
2864 { \
2865 TCGv r_cond; \
2866 int l1; \
2868 l1 = gen_new_label(); \
2869 r_cond = tcg_temp_new(); \
2870 cond = GET_FIELD_SP(insn, 14, 17); \
2871 gen_cond(r_cond, icc, cond, dc); \
2872 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2873 0, l1); \
2874 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2875 gen_update_fprs_dirty(rd); \
2876 gen_set_label(l1); \
2877 tcg_temp_free(r_cond); \
2878 }
2879 #define FMOVDCC(icc) \
2880 { \
2881 TCGv r_cond; \
2882 int l1; \
2884 l1 = gen_new_label(); \
2885 r_cond = tcg_temp_new(); \
2886 cond = GET_FIELD_SP(insn, 14, 17); \
2887 gen_cond(r_cond, icc, cond, dc); \
2888 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2889 0, l1); \
2890 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2891 cpu_fpr[DFPREG(rs2)]); \
2892 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2893 cpu_fpr[DFPREG(rs2) + 1]); \
2894 gen_update_fprs_dirty(DFPREG(rd)); \
2895 gen_set_label(l1); \
2896 tcg_temp_free(r_cond); \
2897 }
2898 #define FMOVQCC(icc) \
2899 { \
2900 TCGv r_cond; \
2901 int l1; \
2903 l1 = gen_new_label(); \
2904 r_cond = tcg_temp_new(); \
2905 cond = GET_FIELD_SP(insn, 14, 17); \
2906 gen_cond(r_cond, icc, cond, dc); \
2907 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2908 0, l1); \
2909 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2910 cpu_fpr[QFPREG(rs2)]); \
2911 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2912 cpu_fpr[QFPREG(rs2) + 1]); \
2913 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2914 cpu_fpr[QFPREG(rs2) + 2]); \
2915 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2916 cpu_fpr[QFPREG(rs2) + 3]); \
2917 gen_update_fprs_dirty(QFPREG(rd)); \
2918 gen_set_label(l1); \
2919 tcg_temp_free(r_cond); \
2920 }
2922 case 0x101: /* V9 fmovscc %icc */
2923 FMOVSCC(0);
2924 break;
2925 case 0x102: /* V9 fmovdcc %icc */
2926 FMOVDCC(0);
2927 break;
2928 case 0x103: /* V9 fmovqcc %icc */
2929 CHECK_FPU_FEATURE(dc, FLOAT128);
2930 FMOVQCC(0);
2931 break;
2932 case 0x181: /* V9 fmovscc %xcc */
2933 FMOVSCC(1);
2934 break;
2935 case 0x182: /* V9 fmovdcc %xcc */
2936 FMOVDCC(1);
2937 break;
2938 case 0x183: /* V9 fmovqcc %xcc */
2939 CHECK_FPU_FEATURE(dc, FLOAT128);
2940 FMOVQCC(1);
2941 break;
2942 #undef FMOVSCC
2943 #undef FMOVDCC
2944 #undef FMOVQCC
2945 #endif
2946 case 0x51: /* fcmps, V9 %fcc */
2947 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2948 break;
2949 case 0x52: /* fcmpd, V9 %fcc */
2950 gen_op_load_fpr_DT0(DFPREG(rs1));
2951 gen_op_load_fpr_DT1(DFPREG(rs2));
2952 gen_op_fcmpd(rd & 3);
2953 break;
2954 case 0x53: /* fcmpq, V9 %fcc */
2955 CHECK_FPU_FEATURE(dc, FLOAT128);
2956 gen_op_load_fpr_QT0(QFPREG(rs1));
2957 gen_op_load_fpr_QT1(QFPREG(rs2));
2958 gen_op_fcmpq(rd & 3);
2959 break;
2960 case 0x55: /* fcmpes, V9 %fcc */
2961 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2962 break;
2963 case 0x56: /* fcmped, V9 %fcc */
2964 gen_op_load_fpr_DT0(DFPREG(rs1));
2965 gen_op_load_fpr_DT1(DFPREG(rs2));
2966 gen_op_fcmped(rd & 3);
2967 break;
2968 case 0x57: /* fcmpeq, V9 %fcc */
2969 CHECK_FPU_FEATURE(dc, FLOAT128);
2970 gen_op_load_fpr_QT0(QFPREG(rs1));
2971 gen_op_load_fpr_QT1(QFPREG(rs2));
2972 gen_op_fcmpeq(rd & 3);
2973 break;
2974 default:
2975 goto illegal_insn;
2976 }
2977 } else if (xop == 0x2) {
2978 // clr/mov shortcut
2980 rs1 = GET_FIELD(insn, 13, 17);
2981 if (rs1 == 0) {
2982 // or %g0, x, y -> mov T0, x; mov y, T0
2983 if (IS_IMM) { /* immediate */
2984 TCGv r_const;
2986 simm = GET_FIELDs(insn, 19, 31);
2987 r_const = tcg_const_tl(simm);
2988 gen_movl_TN_reg(rd, r_const);
2989 tcg_temp_free(r_const);
2990 } else { /* register */
2991 rs2 = GET_FIELD(insn, 27, 31);
2992 gen_movl_reg_TN(rs2, cpu_dst);
2993 gen_movl_TN_reg(rd, cpu_dst);
2994 }
2995 } else {
2996 cpu_src1 = get_src1(insn, cpu_src1);
2997 if (IS_IMM) { /* immediate */
2998 simm = GET_FIELDs(insn, 19, 31);
2999 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3000 gen_movl_TN_reg(rd, cpu_dst);
3001 } else { /* register */
3002 // or x, %g0, y -> mov T1, x; mov y, T1
3003 rs2 = GET_FIELD(insn, 27, 31);
3004 if (rs2 != 0) {
3005 gen_movl_reg_TN(rs2, cpu_src2);
3006 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3007 gen_movl_TN_reg(rd, cpu_dst);
3008 } else
3009 gen_movl_TN_reg(rd, cpu_src1);
3010 }
3011 }
3012 #ifdef TARGET_SPARC64
3013 } else if (xop == 0x25) { /* sll, V9 sllx */
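/* Bit 12 of the insn selects the V9 64-bit form (sllx/srlx/srax, 6-bit
   shift count); otherwise the count is masked to 5 bits as on V8. */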
3014 cpu_src1 = get_src1(insn, cpu_src1);
3015 if (IS_IMM) { /* immediate */
3016 simm = GET_FIELDs(insn, 20, 31);
3017 if (insn & (1 << 12)) {
3018 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3019 } else {
3020 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3021 }
3022 } else { /* register */
3023 rs2 = GET_FIELD(insn, 27, 31);
3024 gen_movl_reg_TN(rs2, cpu_src2);
3025 if (insn & (1 << 12)) {
3026 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3027 } else {
3028 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3029 }
3030 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3031 }
3032 gen_movl_TN_reg(rd, cpu_dst);
3033 } else if (xop == 0x26) { /* srl, V9 srlx */
3034 cpu_src1 = get_src1(insn, cpu_src1);
3035 if (IS_IMM) { /* immediate */
3036 simm = GET_FIELDs(insn, 20, 31);
3037 if (insn & (1 << 12)) {
3038 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3039 } else {
3040 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3041 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3042 }
3043 } else { /* register */
3044 rs2 = GET_FIELD(insn, 27, 31);
3045 gen_movl_reg_TN(rs2, cpu_src2);
3046 if (insn & (1 << 12)) {
3047 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3048 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3049 } else {
3050 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3051 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3052 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3053 }
3054 }
3055 gen_movl_TN_reg(rd, cpu_dst);
3056 } else if (xop == 0x27) { /* sra, V9 srax */
3057 cpu_src1 = get_src1(insn, cpu_src1);
3058 if (IS_IMM) { /* immediate */
3059 simm = GET_FIELDs(insn, 20, 31);
3060 if (insn & (1 << 12)) {
3061 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3062 } else {
3063 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3064 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3065 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3066 }
3067 } else { /* register */
3068 rs2 = GET_FIELD(insn, 27, 31);
3069 gen_movl_reg_TN(rs2, cpu_src2);
3070 if (insn & (1 << 12)) {
3071 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3072 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3073 } else {
3074 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3075 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3077 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3078 }
3079 }
3080 gen_movl_TN_reg(rd, cpu_dst);
3081 #endif
3082 } else if (xop < 0x36) {
3083 if (xop < 0x20) {
3084 cpu_src1 = get_src1(insn, cpu_src1);
3085 cpu_src2 = get_src2(insn, cpu_src2);
3086 switch (xop & ~0x10) {
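/* Bit 4 of xop selects the cc-setting variant (addcc, andcc, ...); the
   switch masks it off and each case tests (xop & 0x10) itself. */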
3087 case 0x0: /* add */
3088 if (IS_IMM) {
3089 simm = GET_FIELDs(insn, 19, 31);
3090 if (xop & 0x10) {
3091 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3092 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3093 dc->cc_op = CC_OP_ADD;
3094 } else {
3095 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3096 }
3097 } else {
3098 if (xop & 0x10) {
3099 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3100 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3101 dc->cc_op = CC_OP_ADD;
3102 } else {
3103 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3104 }
3105 }
3106 break;
3107 case 0x1: /* and */
3108 if (IS_IMM) {
3109 simm = GET_FIELDs(insn, 19, 31);
3110 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3111 } else {
3112 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3113 }
3114 if (xop & 0x10) {
3115 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3116 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3117 dc->cc_op = CC_OP_LOGIC;
3118 }
3119 break;
3120 case 0x2: /* or */
3121 if (IS_IMM) {
3122 simm = GET_FIELDs(insn, 19, 31);
3123 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3124 } else {
3125 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3126 }
3127 if (xop & 0x10) {
3128 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3129 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3130 dc->cc_op = CC_OP_LOGIC;
3131 }
3132 break;
3133 case 0x3: /* xor */
3134 if (IS_IMM) {
3135 simm = GET_FIELDs(insn, 19, 31);
3136 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3137 } else {
3138 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3139 }
3140 if (xop & 0x10) {
3141 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3142 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3143 dc->cc_op = CC_OP_LOGIC;
3144 }
3145 break;
3146 case 0x4: /* sub */
3147 if (IS_IMM) {
3148 simm = GET_FIELDs(insn, 19, 31);
3149 if (xop & 0x10) {
3150 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3151 } else {
3152 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3153 }
3154 } else {
3155 if (xop & 0x10) {
3156 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3157 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3158 dc->cc_op = CC_OP_SUB;
3159 } else {
3160 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3161 }
3162 }
3163 break;
3164 case 0x5: /* andn */
3165 if (IS_IMM) {
3166 simm = GET_FIELDs(insn, 19, 31);
3167 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3168 } else {
3169 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3170 }
3171 if (xop & 0x10) {
3172 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3173 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3174 dc->cc_op = CC_OP_LOGIC;
3175 }
3176 break;
3177 case 0x6: /* orn */
3178 if (IS_IMM) {
3179 simm = GET_FIELDs(insn, 19, 31);
3180 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3181 } else {
3182 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3183 }
3184 if (xop & 0x10) {
3185 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3186 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3187 dc->cc_op = CC_OP_LOGIC;
3188 }
3189 break;
3190 case 0x7: /* xorn */
3191 if (IS_IMM) {
3192 simm = GET_FIELDs(insn, 19, 31);
3193 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3194 } else {
3195 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3196 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3197 }
3198 if (xop & 0x10) {
3199 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3200 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3201 dc->cc_op = CC_OP_LOGIC;
3202 }
3203 break;
3204 case 0x8: /* addx, V9 addc */
3205 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3206 (xop & 0x10));
3207 break;
3208 #ifdef TARGET_SPARC64
3209 case 0x9: /* V9 mulx */
3210 if (IS_IMM) {
3211 simm = GET_FIELDs(insn, 19, 31);
3212 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3213 } else {
3214 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3215 }
3216 break;
3217 #endif
3218 case 0xa: /* umul */
3219 CHECK_IU_FEATURE(dc, MUL);
3220 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3221 if (xop & 0x10) {
3222 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3223 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3224 dc->cc_op = CC_OP_LOGIC;
3225 }
3226 break;
3227 case 0xb: /* smul */
3228 CHECK_IU_FEATURE(dc, MUL);
3229 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3230 if (xop & 0x10) {
3231 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3232 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3233 dc->cc_op = CC_OP_LOGIC;
3234 }
3235 break;
3236 case 0xc: /* subx, V9 subc */
3237 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3238 (xop & 0x10));
3239 break;
3240 #ifdef TARGET_SPARC64
3241 case 0xd: /* V9 udivx */
3242 {
3243 TCGv r_temp1, r_temp2;
3244 r_temp1 = tcg_temp_local_new();
3245 r_temp2 = tcg_temp_local_new();
3246 tcg_gen_mov_tl(r_temp1, cpu_src1);
3247 tcg_gen_mov_tl(r_temp2, cpu_src2);
3248 gen_trap_ifdivzero_tl(r_temp2);
3249 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3250 tcg_temp_free(r_temp1);
3251 tcg_temp_free(r_temp2);
3252 }
3253 break;
3254 #endif
3255 case 0xe: /* udiv */
3256 CHECK_IU_FEATURE(dc, DIV);
3257 if (xop & 0x10) {
3258 gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3259 dc->cc_op = CC_OP_DIV;
3260 } else {
3261 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3262 }
3263 break;
3264 case 0xf: /* sdiv */
3265 CHECK_IU_FEATURE(dc, DIV);
3266 if (xop & 0x10) {
3267 gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3268 dc->cc_op = CC_OP_DIV;
3269 } else {
3270 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3271 }
3272 break;
3273 default:
3274 goto illegal_insn;
3275 }
3276 gen_movl_TN_reg(rd, cpu_dst);
3277 } else {
3278 cpu_src1 = get_src1(insn, cpu_src1);
3279 cpu_src2 = get_src2(insn, cpu_src2);
3280 switch (xop) {
3281 case 0x20: /* taddcc */
3282 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3283 gen_movl_TN_reg(rd, cpu_dst);
3284 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3285 dc->cc_op = CC_OP_TADD;
3286 break;
3287 case 0x21: /* tsubcc */
3288 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3289 gen_movl_TN_reg(rd, cpu_dst);
3290 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3291 dc->cc_op = CC_OP_TSUB;
3292 break;
3293 case 0x22: /* taddcctv */
3294 save_state(dc, cpu_cond);
3295 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3296 gen_movl_TN_reg(rd, cpu_dst);
3297 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3298 dc->cc_op = CC_OP_TADDTV;
3299 break;
3300 case 0x23: /* tsubcctv */
3301 save_state(dc, cpu_cond);
3302 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3303 gen_movl_TN_reg(rd, cpu_dst);
3304 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3305 dc->cc_op = CC_OP_TSUBTV;
3306 break;
3307 case 0x24: /* mulscc */
3308 gen_helper_compute_psr();
3309 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3310 gen_movl_TN_reg(rd, cpu_dst);
3311 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3312 dc->cc_op = CC_OP_ADD;
3313 break;
3314 #ifndef TARGET_SPARC64
3315 case 0x25: /* sll */
3316 if (IS_IMM) { /* immediate */
3317 simm = GET_FIELDs(insn, 20, 31);
3318 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3319 } else { /* register */
3320 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3321 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3322 }
3323 gen_movl_TN_reg(rd, cpu_dst);
3324 break;
3325 case 0x26: /* srl */
3326 if (IS_IMM) { /* immediate */
3327 simm = GET_FIELDs(insn, 20, 31);
3328 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3329 } else { /* register */
3330 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3331 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3332 }
3333 gen_movl_TN_reg(rd, cpu_dst);
3334 break;
3335 case 0x27: /* sra */
3336 if (IS_IMM) { /* immediate */
3337 simm = GET_FIELDs(insn, 20, 31);
3338 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3339 } else { /* register */
3340 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3341 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3342 }
3343 gen_movl_TN_reg(rd, cpu_dst);
3344 break;
3345 #endif
3346 case 0x30:
3347 {
3348 switch(rd) {
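/* wry/wrasr: rd selects the ancillary state register; per the SPARC
   spec the value written is rs1 XOR (rs2 or simm13). */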
3349 case 0: /* wry */
3350 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3351 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3352 break;
3353 #ifndef TARGET_SPARC64
3354 case 0x01 ... 0x0f: /* undefined in the
3355 SPARCv8 manual, nop
3356 on the microSPARC
3357 II */
3358 case 0x10 ... 0x1f: /* implementation-dependent
3359 in the SPARCv8
3360 manual, nop on the
3361 microSPARC II */
3362 break;
3363 #else
3364 case 0x2: /* V9 wrccr */
3365 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3366 gen_helper_wrccr(cpu_dst);
3367 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3368 dc->cc_op = CC_OP_FLAGS;
3369 break;
3370 case 0x3: /* V9 wrasi */
3371 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3372 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3373 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3374 break;
3375 case 0x6: /* V9 wrfprs */
3376 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3377 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3378 save_state(dc, cpu_cond);
3379 gen_op_next_insn();
3380 tcg_gen_exit_tb(0);
3381 dc->is_br = 1;
3382 break;
3383 case 0xf: /* V9 sir, nop if user */
3384 #if !defined(CONFIG_USER_ONLY)
3385 if (supervisor(dc)) {
3386 ; // XXX
3387 }
3388 #endif
3389 break;
3390 case 0x13: /* Graphics Status */
3391 if (gen_trap_ifnofpu(dc, cpu_cond))
3392 goto jmp_insn;
3393 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3394 break;
3395 case 0x14: /* Softint set */
3396 if (!supervisor(dc))
3397 goto illegal_insn;
3398 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3399 gen_helper_set_softint(cpu_tmp64);
3400 break;
3401 case 0x15: /* Softint clear */
3402 if (!supervisor(dc))
3403 goto illegal_insn;
3404 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3405 gen_helper_clear_softint(cpu_tmp64);
3406 break;
3407 case 0x16: /* Softint write */
3408 if (!supervisor(dc))
3409 goto illegal_insn;
3410 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3411 gen_helper_write_softint(cpu_tmp64);
3412 break;
3413 case 0x17: /* Tick compare */
3414 #if !defined(CONFIG_USER_ONLY)
3415 if (!supervisor(dc))
3416 goto illegal_insn;
3417 #endif
3418 {
3419 TCGv_ptr r_tickptr;
3421 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3422 cpu_src2);
3423 r_tickptr = tcg_temp_new_ptr();
3424 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3425 offsetof(CPUState, tick));
3426 gen_helper_tick_set_limit(r_tickptr,
3427 cpu_tick_cmpr);
3428 tcg_temp_free_ptr(r_tickptr);
3429 }
3430 break;
3431 case 0x18: /* System tick */
3432 #if !defined(CONFIG_USER_ONLY)
3433 if (!supervisor(dc))
3434 goto illegal_insn;
3435 #endif
3436 {
3437 TCGv_ptr r_tickptr;
3439 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3440 cpu_src2);
3441 r_tickptr = tcg_temp_new_ptr();
3442 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3443 offsetof(CPUState, stick));
3444 gen_helper_tick_set_count(r_tickptr,
3445 cpu_dst);
3446 tcg_temp_free_ptr(r_tickptr);
3447 }
3448 break;
3449 case 0x19: /* System tick compare */
3450 #if !defined(CONFIG_USER_ONLY)
3451 if (!supervisor(dc))
3452 goto illegal_insn;
3453 #endif
3454 {
3455 TCGv_ptr r_tickptr;
3457 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3458 cpu_src2);
3459 r_tickptr = tcg_temp_new_ptr();
3460 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3461 offsetof(CPUState, stick));
3462 gen_helper_tick_set_limit(r_tickptr,
3463 cpu_stick_cmpr);
3464 tcg_temp_free_ptr(r_tickptr);
3465 }
3466 break;
3468 case 0x10: /* Performance Control */
3469 case 0x11: /* Performance Instrumentation
3470 Counter */
3471 case 0x12: /* Dispatch Control */
3472 #endif
3473 default:
3474 goto illegal_insn;
3475 }
3476 }
3477 break;
3478 #if !defined(CONFIG_USER_ONLY)
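/* A PSR write can change the condition codes and other state that
   translation depends on, so the V8 path below ends the TB and resumes
   at the next insn. */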
3479 case 0x31: /* wrpsr, V9 saved, restored */
3480 {
3481 if (!supervisor(dc))
3482 goto priv_insn;
3483 #ifdef TARGET_SPARC64
3484 switch (rd) {
3485 case 0:
3486 gen_helper_saved();
3487 break;
3488 case 1:
3489 gen_helper_restored();
3490 break;
3491 case 2: /* UA2005 allclean */
3492 case 3: /* UA2005 otherw */
3493 case 4: /* UA2005 normalw */
3494 case 5: /* UA2005 invalw */
3495 // XXX
3496 default:
3497 goto illegal_insn;
3498 }
3499 #else
3500 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3501 gen_helper_wrpsr(cpu_dst);
3502 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3503 dc->cc_op = CC_OP_FLAGS;
3504 save_state(dc, cpu_cond);
3505 gen_op_next_insn();
3506 tcg_gen_exit_tb(0);
3507 dc->is_br = 1;
3508 #endif
3509 }
3510 break;
3511 case 0x32: /* wrwim, V9 wrpr */
3512 {
3513 if (!supervisor(dc))
3514 goto priv_insn;
3515 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3516 #ifdef TARGET_SPARC64
3517 switch (rd) {
3518 case 0: // tpc
3519 {
3520 TCGv_ptr r_tsptr;
3522 r_tsptr = tcg_temp_new_ptr();
3523 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3524 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3525 offsetof(trap_state, tpc));
3526 tcg_temp_free_ptr(r_tsptr);
3527 }
3528 break;
3529 case 1: // tnpc
3530 {
3531 TCGv_ptr r_tsptr;
3533 r_tsptr = tcg_temp_new_ptr();
3534 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3535 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3536 offsetof(trap_state, tnpc));
3537 tcg_temp_free_ptr(r_tsptr);
3538 }
3539 break;
3540 case 2: // tstate
3541 {
3542 TCGv_ptr r_tsptr;
3544 r_tsptr = tcg_temp_new_ptr();
3545 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3546 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3547 offsetof(trap_state,
3548 tstate));
3549 tcg_temp_free_ptr(r_tsptr);
3550 }
3551 break;
3552 case 3: // tt
3553 {
3554 TCGv_ptr r_tsptr;
3556 r_tsptr = tcg_temp_new_ptr();
3557 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3558 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3559 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3560 offsetof(trap_state, tt));
3561 tcg_temp_free_ptr(r_tsptr);
3562 }
3563 break;
3564 case 4: // tick
3565 {
3566 TCGv_ptr r_tickptr;
3568 r_tickptr = tcg_temp_new_ptr();
3569 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3570 offsetof(CPUState, tick));
3571 gen_helper_tick_set_count(r_tickptr,
3572 cpu_tmp0);
3573 tcg_temp_free_ptr(r_tickptr);
3574 }
3575 break;
3576 case 5: // tba
3577 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3578 break;
3579 case 6: // pstate
3580 {
3581 TCGv r_tmp = tcg_temp_local_new();
3583 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3584 save_state(dc, cpu_cond);
3585 gen_helper_wrpstate(r_tmp);
3586 tcg_temp_free(r_tmp);
3587 dc->npc = DYNAMIC_PC;
3588 }
3589 break;
3590 case 7: // tl
3591 {
3592 TCGv r_tmp = tcg_temp_local_new();
3594 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3595 save_state(dc, cpu_cond);
3596 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3597 tcg_temp_free(r_tmp);
3598 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3599 offsetof(CPUSPARCState, tl));
3600 dc->npc = DYNAMIC_PC;
3601 }
3602 break;
3603 case 8: // pil
3604 gen_helper_wrpil(cpu_tmp0);
3605 break;
3606 case 9: // cwp
3607 gen_helper_wrcwp(cpu_tmp0);
3608 break;
3609 case 10: // cansave
3610 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3611 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3612 offsetof(CPUSPARCState,
3613 cansave));
3614 break;
3615 case 11: // canrestore
3616 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3617 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3618 offsetof(CPUSPARCState,
3619 canrestore));
3620 break;
3621 case 12: // cleanwin
3622 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3623 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3624 offsetof(CPUSPARCState,
3625 cleanwin));
3626 break;
3627 case 13: // otherwin
3628 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3629 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3630 offsetof(CPUSPARCState,
3631 otherwin));
3632 break;
3633 case 14: // wstate
3634 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3635 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3636 offsetof(CPUSPARCState,
3637 wstate));
3638 break;
3639 case 16: // UA2005 gl
3640 CHECK_IU_FEATURE(dc, GL);
3641 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3642 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3643 offsetof(CPUSPARCState, gl));
3644 break;
3645 case 26: // UA2005 strand status
3646 CHECK_IU_FEATURE(dc, HYPV);
3647 if (!hypervisor(dc))
3648 goto priv_insn;
3649 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3650 break;
3651 default:
3652 goto illegal_insn;
3653 }
3654 #else
3655 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3656 if (dc->def->nwindows != 32)
3657 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3658 (1 << dc->def->nwindows) - 1);
3659 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3660 #endif
3661 }
3662 break;
3663 case 0x33: /* wrtbr, UA2005 wrhpr */
3664 {
3665 #ifndef TARGET_SPARC64
3666 if (!supervisor(dc))
3667 goto priv_insn;
3668 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3669 #else
3670 CHECK_IU_FEATURE(dc, HYPV);
3671 if (!hypervisor(dc))
3672 goto priv_insn;
3673 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3674 switch (rd) {
3675 case 0: // hpstate
3676 // XXX gen_op_wrhpstate();
3677 save_state(dc, cpu_cond);
3678 gen_op_next_insn();
3679 tcg_gen_exit_tb(0);
3680 dc->is_br = 1;
3681 break;
3682 case 1: // htstate
3683 // XXX gen_op_wrhtstate();
3684 break;
3685 case 3: // hintp
3686 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3687 break;
3688 case 5: // htba
3689 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3690 break;
3691 case 31: // hstick_cmpr
3692 {
3693 TCGv_ptr r_tickptr;
3695 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3696 r_tickptr = tcg_temp_new_ptr();
3697 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3698 offsetof(CPUState, hstick));
3699 gen_helper_tick_set_limit(r_tickptr,
3700 cpu_hstick_cmpr);
3701 tcg_temp_free_ptr(r_tickptr);
3702 }
3703 break;
3704 case 6: // hver readonly
3705 default:
3706 goto illegal_insn;
3707 }
3708 #endif
3709 }
3710 break;
3711 #endif
3712 #ifdef TARGET_SPARC64
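/* movcc: conditional integer move on %icc/%xcc or one of the %fcc
   fields, implemented as a branch around the register write. */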
3713 case 0x2c: /* V9 movcc */
3714 {
3715 int cc = GET_FIELD_SP(insn, 11, 12);
3716 int cond = GET_FIELD_SP(insn, 14, 17);
3717 TCGv r_cond;
3718 int l1;
3720 r_cond = tcg_temp_new();
3721 if (insn & (1 << 18)) {
3722 if (cc == 0)
3723 gen_cond(r_cond, 0, cond, dc);
3724 else if (cc == 2)
3725 gen_cond(r_cond, 1, cond, dc);
3726 else
3727 goto illegal_insn;
3728 } else {
3729 gen_fcond(r_cond, cc, cond);
3730 }
3732 l1 = gen_new_label();
3734 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3735 if (IS_IMM) { /* immediate */
3736 TCGv r_const;
3738 simm = GET_FIELD_SPs(insn, 0, 10);
3739 r_const = tcg_const_tl(simm);
3740 gen_movl_TN_reg(rd, r_const);
3741 tcg_temp_free(r_const);
3742 } else {
3743 rs2 = GET_FIELD_SP(insn, 0, 4);
3744 gen_movl_reg_TN(rs2, cpu_tmp0);
3745 gen_movl_TN_reg(rd, cpu_tmp0);
3746 }
3747 gen_set_label(l1);
3748 tcg_temp_free(r_cond);
3749 break;
3750 }
3751 case 0x2d: /* V9 sdivx */
3752 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3753 gen_movl_TN_reg(rd, cpu_dst);
3754 break;
3755 case 0x2e: /* V9 popc */
3756 {
3757 cpu_src2 = get_src2(insn, cpu_src2);
3758 gen_helper_popc(cpu_dst, cpu_src2);
3759 gen_movl_TN_reg(rd, cpu_dst);
3760 }
3761 case 0x2f: /* V9 movr */
3762 {
3763 int cond = GET_FIELD_SP(insn, 10, 12);
3764 int l1;
3766 cpu_src1 = get_src1(insn, cpu_src1);
3768 l1 = gen_new_label();
3770 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3771 cpu_src1, 0, l1);
3772 if (IS_IMM) { /* immediate */
3773 TCGv r_const;
3775 simm = GET_FIELD_SPs(insn, 0, 9);
3776 r_const = tcg_const_tl(simm);
3777 gen_movl_TN_reg(rd, r_const);
3778 tcg_temp_free(r_const);
3779 } else {
3780 rs2 = GET_FIELD_SP(insn, 0, 4);
3781 gen_movl_reg_TN(rs2, cpu_tmp0);
3782 gen_movl_TN_reg(rd, cpu_tmp0);
3783 }
3784 gen_set_label(l1);
3785 break;
3786 }
3787 #endif
3788 default:
3789 goto illegal_insn;
3790 }
3791 }
3792 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3793 #ifdef TARGET_SPARC64
3794 int opf = GET_FIELD_SP(insn, 5, 13);
3795 rs1 = GET_FIELD(insn, 13, 17);
3796 rs2 = GET_FIELD(insn, 27, 31);
3797 if (gen_trap_ifnofpu(dc, cpu_cond))
3798 goto jmp_insn;
3800 switch (opf) {
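/* VIS ops: opf selects the operation; the 64-bit forms stage their
   operands through DT0/DT1 and call a helper, while the 32-bit forms
   work on cpu_fpr[] directly. */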
3801 case 0x000: /* VIS I edge8cc */
3802 case 0x001: /* VIS II edge8n */
3803 case 0x002: /* VIS I edge8lcc */
3804 case 0x003: /* VIS II edge8ln */
3805 case 0x004: /* VIS I edge16cc */
3806 case 0x005: /* VIS II edge16n */
3807 case 0x006: /* VIS I edge16lcc */
3808 case 0x007: /* VIS II edge16ln */
3809 case 0x008: /* VIS I edge32cc */
3810 case 0x009: /* VIS II edge32n */
3811 case 0x00a: /* VIS I edge32lcc */
3812 case 0x00b: /* VIS II edge32ln */
3813 // XXX
3814 goto illegal_insn;
3815 case 0x010: /* VIS I array8 */
3816 CHECK_FPU_FEATURE(dc, VIS1);
3817 cpu_src1 = get_src1(insn, cpu_src1);
3818 gen_movl_reg_TN(rs2, cpu_src2);
3819 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3820 gen_movl_TN_reg(rd, cpu_dst);
3821 break;
3822 case 0x012: /* VIS I array16 */
3823 CHECK_FPU_FEATURE(dc, VIS1);
3824 cpu_src1 = get_src1(insn, cpu_src1);
3825 gen_movl_reg_TN(rs2, cpu_src2);
3826 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3827 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3828 gen_movl_TN_reg(rd, cpu_dst);
3829 break;
3830 case 0x014: /* VIS I array32 */
3831 CHECK_FPU_FEATURE(dc, VIS1);
3832 cpu_src1 = get_src1(insn, cpu_src1);
3833 gen_movl_reg_TN(rs2, cpu_src2);
3834 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3835 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3836 gen_movl_TN_reg(rd, cpu_dst);
3837 break;
3838 case 0x018: /* VIS I alignaddr */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 cpu_src1 = get_src1(insn, cpu_src1);
3841 gen_movl_reg_TN(rs2, cpu_src2);
3842 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3843 gen_movl_TN_reg(rd, cpu_dst);
3844 break;
3845 case 0x019: /* VIS II bmask */
3846 case 0x01a: /* VIS I alignaddrl */
3847 // XXX
3848 goto illegal_insn;
3849 case 0x020: /* VIS I fcmple16 */
3850 CHECK_FPU_FEATURE(dc, VIS1);
3851 gen_op_load_fpr_DT0(DFPREG(rs1));
3852 gen_op_load_fpr_DT1(DFPREG(rs2));
3853 gen_helper_fcmple16(cpu_dst);
3854 gen_movl_TN_reg(rd, cpu_dst);
3855 break;
3856 case 0x022: /* VIS I fcmpne16 */
3857 CHECK_FPU_FEATURE(dc, VIS1);
3858 gen_op_load_fpr_DT0(DFPREG(rs1));
3859 gen_op_load_fpr_DT1(DFPREG(rs2));
3860 gen_helper_fcmpne16(cpu_dst);
3861 gen_movl_TN_reg(rd, cpu_dst);
3862 break;
3863 case 0x024: /* VIS I fcmple32 */
3864 CHECK_FPU_FEATURE(dc, VIS1);
3865 gen_op_load_fpr_DT0(DFPREG(rs1));
3866 gen_op_load_fpr_DT1(DFPREG(rs2));
3867 gen_helper_fcmple32(cpu_dst);
3868 gen_movl_TN_reg(rd, cpu_dst);
3869 break;
3870 case 0x026: /* VIS I fcmpne32 */
3871 CHECK_FPU_FEATURE(dc, VIS1);
3872 gen_op_load_fpr_DT0(DFPREG(rs1));
3873 gen_op_load_fpr_DT1(DFPREG(rs2));
3874 gen_helper_fcmpne32(cpu_dst);
3875 gen_movl_TN_reg(rd, cpu_dst);
3876 break;
3877 case 0x028: /* VIS I fcmpgt16 */
3878 CHECK_FPU_FEATURE(dc, VIS1);
3879 gen_op_load_fpr_DT0(DFPREG(rs1));
3880 gen_op_load_fpr_DT1(DFPREG(rs2));
3881 gen_helper_fcmpgt16(cpu_dst);
3882 gen_movl_TN_reg(rd, cpu_dst);
3883 break;
3884 case 0x02a: /* VIS I fcmpeq16 */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 gen_op_load_fpr_DT0(DFPREG(rs1));
3887 gen_op_load_fpr_DT1(DFPREG(rs2));
3888 gen_helper_fcmpeq16(cpu_dst);
3889 gen_movl_TN_reg(rd, cpu_dst);
3890 break;
3891 case 0x02c: /* VIS I fcmpgt32 */
3892 CHECK_FPU_FEATURE(dc, VIS1);
3893 gen_op_load_fpr_DT0(DFPREG(rs1));
3894 gen_op_load_fpr_DT1(DFPREG(rs2));
3895 gen_helper_fcmpgt32(cpu_dst);
3896 gen_movl_TN_reg(rd, cpu_dst);
3897 break;
3898 case 0x02e: /* VIS I fcmpeq32 */
3899 CHECK_FPU_FEATURE(dc, VIS1);
3900 gen_op_load_fpr_DT0(DFPREG(rs1));
3901 gen_op_load_fpr_DT1(DFPREG(rs2));
3902 gen_helper_fcmpeq32(cpu_dst);
3903 gen_movl_TN_reg(rd, cpu_dst);
3904 break;
3905 case 0x031: /* VIS I fmul8x16 */
3906 CHECK_FPU_FEATURE(dc, VIS1);
3907 gen_op_load_fpr_DT0(DFPREG(rs1));
3908 gen_op_load_fpr_DT1(DFPREG(rs2));
3909 gen_helper_fmul8x16();
3910 gen_op_store_DT0_fpr(DFPREG(rd));
3911 gen_update_fprs_dirty(DFPREG(rd));
3912 break;
3913 case 0x033: /* VIS I fmul8x16au */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 gen_op_load_fpr_DT0(DFPREG(rs1));
3916 gen_op_load_fpr_DT1(DFPREG(rs2));
3917 gen_helper_fmul8x16au();
3918 gen_op_store_DT0_fpr(DFPREG(rd));
3919 gen_update_fprs_dirty(DFPREG(rd));
3920 break;
3921 case 0x035: /* VIS I fmul8x16al */
3922 CHECK_FPU_FEATURE(dc, VIS1);
3923 gen_op_load_fpr_DT0(DFPREG(rs1));
3924 gen_op_load_fpr_DT1(DFPREG(rs2));
3925 gen_helper_fmul8x16al();
3926 gen_op_store_DT0_fpr(DFPREG(rd));
3927 gen_update_fprs_dirty(DFPREG(rd));
3928 break;
3929 case 0x036: /* VIS I fmul8sux16 */
3930 CHECK_FPU_FEATURE(dc, VIS1);
3931 gen_op_load_fpr_DT0(DFPREG(rs1));
3932 gen_op_load_fpr_DT1(DFPREG(rs2));
3933 gen_helper_fmul8sux16();
3934 gen_op_store_DT0_fpr(DFPREG(rd));
3935 gen_update_fprs_dirty(DFPREG(rd));
3936 break;
3937 case 0x037: /* VIS I fmul8ulx16 */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 gen_op_load_fpr_DT0(DFPREG(rs1));
3940 gen_op_load_fpr_DT1(DFPREG(rs2));
3941 gen_helper_fmul8ulx16();
3942 gen_op_store_DT0_fpr(DFPREG(rd));
3943 gen_update_fprs_dirty(DFPREG(rd));
3944 break;
3945 case 0x038: /* VIS I fmuld8sux16 */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 gen_op_load_fpr_DT0(DFPREG(rs1));
3948 gen_op_load_fpr_DT1(DFPREG(rs2));
3949 gen_helper_fmuld8sux16();
3950 gen_op_store_DT0_fpr(DFPREG(rd));
3951 gen_update_fprs_dirty(DFPREG(rd));
3952 break;
3953 case 0x039: /* VIS I fmuld8ulx16 */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 gen_op_load_fpr_DT0(DFPREG(rs1));
3956 gen_op_load_fpr_DT1(DFPREG(rs2));
3957 gen_helper_fmuld8ulx16();
3958 gen_op_store_DT0_fpr(DFPREG(rd));
3959 gen_update_fprs_dirty(DFPREG(rd));
3960 break;
3961 case 0x03a: /* VIS I fpack32 */
3962 case 0x03b: /* VIS I fpack16 */
3963 case 0x03d: /* VIS I fpackfix */
3964 case 0x03e: /* VIS I pdist */
3965 // XXX
3966 goto illegal_insn;
3967 case 0x048: /* VIS I faligndata */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 gen_op_load_fpr_DT0(DFPREG(rs1));
3970 gen_op_load_fpr_DT1(DFPREG(rs2));
3971 gen_helper_faligndata();
3972 gen_op_store_DT0_fpr(DFPREG(rd));
3973 gen_update_fprs_dirty(DFPREG(rd));
3974 break;
3975 case 0x04b: /* VIS I fpmerge */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 gen_op_load_fpr_DT0(DFPREG(rs1));
3978 gen_op_load_fpr_DT1(DFPREG(rs2));
3979 gen_helper_fpmerge();
3980 gen_op_store_DT0_fpr(DFPREG(rd));
3981 gen_update_fprs_dirty(DFPREG(rd));
3982 break;
3983 case 0x04c: /* VIS II bshuffle */
3984 // XXX
3985 goto illegal_insn;
3986 case 0x04d: /* VIS I fexpand */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 gen_op_load_fpr_DT0(DFPREG(rs1));
3989 gen_op_load_fpr_DT1(DFPREG(rs2));
3990 gen_helper_fexpand();
3991 gen_op_store_DT0_fpr(DFPREG(rd));
3992 gen_update_fprs_dirty(DFPREG(rd));
3993 break;
3994 case 0x050: /* VIS I fpadd16 */
3995 CHECK_FPU_FEATURE(dc, VIS1);
3996 gen_op_load_fpr_DT0(DFPREG(rs1));
3997 gen_op_load_fpr_DT1(DFPREG(rs2));
3998 gen_helper_fpadd16();
3999 gen_op_store_DT0_fpr(DFPREG(rd));
4000 gen_update_fprs_dirty(DFPREG(rd));
4001 break;
4002 case 0x051: /* VIS I fpadd16s */
4003 CHECK_FPU_FEATURE(dc, VIS1);
4004 gen_helper_fpadd16s(cpu_fpr[rd],
4005 cpu_fpr[rs1], cpu_fpr[rs2]);
4006 gen_update_fprs_dirty(rd);
4007 break;
4008 case 0x052: /* VIS I fpadd32 */
4009 CHECK_FPU_FEATURE(dc, VIS1);
4010 gen_op_load_fpr_DT0(DFPREG(rs1));
4011 gen_op_load_fpr_DT1(DFPREG(rs2));
4012 gen_helper_fpadd32();
4013 gen_op_store_DT0_fpr(DFPREG(rd));
4014 gen_update_fprs_dirty(DFPREG(rd));
4015 break;
4016 case 0x053: /* VIS I fpadd32s */
4017 CHECK_FPU_FEATURE(dc, VIS1);
4018 gen_helper_fpadd32s(cpu_fpr[rd],
4019 cpu_fpr[rs1], cpu_fpr[rs2]);
4020 gen_update_fprs_dirty(rd);
4021 break;
4022 case 0x054: /* VIS I fpsub16 */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 gen_op_load_fpr_DT0(DFPREG(rs1));
4025 gen_op_load_fpr_DT1(DFPREG(rs2));
4026 gen_helper_fpsub16();
4027 gen_op_store_DT0_fpr(DFPREG(rd));
4028 gen_update_fprs_dirty(DFPREG(rd));
4029 break;
4030 case 0x055: /* VIS I fpsub16s */
4031 CHECK_FPU_FEATURE(dc, VIS1);
4032 gen_helper_fpsub16s(cpu_fpr[rd],
4033 cpu_fpr[rs1], cpu_fpr[rs2]);
4034 gen_update_fprs_dirty(rd);
4035 break;
4036 case 0x056: /* VIS I fpsub32 */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 gen_op_load_fpr_DT0(DFPREG(rs1));
4039 gen_op_load_fpr_DT1(DFPREG(rs2));
4040 gen_helper_fpsub32();
4041 gen_op_store_DT0_fpr(DFPREG(rd));
4042 gen_update_fprs_dirty(DFPREG(rd));
4043 break;
4044 case 0x057: /* VIS I fpsub32s */
4045 CHECK_FPU_FEATURE(dc, VIS1);
4046 gen_helper_fpsub32s(cpu_fpr[rd],
4047 cpu_fpr[rs1], cpu_fpr[rs2]);
4048 gen_update_fprs_dirty(rd);
4049 break;
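/* The VIS logical ops below (fzero ... fones) expand to plain TCG
   bitwise ops on the 32-bit FP register halves; no helpers needed. */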
4050 case 0x060: /* VIS I fzero */
4051 CHECK_FPU_FEATURE(dc, VIS1);
4052 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4053 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4054 gen_update_fprs_dirty(DFPREG(rd));
4055 break;
4056 case 0x061: /* VIS I fzeros */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4059 gen_update_fprs_dirty(rd);
4060 break;
4061 case 0x062: /* VIS I fnor */
4062 CHECK_FPU_FEATURE(dc, VIS1);
4063 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4064 cpu_fpr[DFPREG(rs2)]);
4065 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
4066 cpu_fpr[DFPREG(rs1) + 1],
4067 cpu_fpr[DFPREG(rs2) + 1]);
4068 gen_update_fprs_dirty(DFPREG(rd));
4069 break;
4070 case 0x063: /* VIS I fnors */
4071 CHECK_FPU_FEATURE(dc, VIS1);
4072 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4073 gen_update_fprs_dirty(rd);
4074 break;
4075 case 0x064: /* VIS I fandnot2 */
4076 CHECK_FPU_FEATURE(dc, VIS1);
4077 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4078 cpu_fpr[DFPREG(rs2)]);
4079 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4080 cpu_fpr[DFPREG(rs1) + 1],
4081 cpu_fpr[DFPREG(rs2) + 1]);
4082 gen_update_fprs_dirty(DFPREG(rd));
4083 break;
4084 case 0x065: /* VIS I fandnot2s */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4087 gen_update_fprs_dirty(rd);
4088 break;
4089 case 0x066: /* VIS I fnot2 */
4090 CHECK_FPU_FEATURE(dc, VIS1);
4091 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4092 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4093 cpu_fpr[DFPREG(rs2) + 1]);
4094 gen_update_fprs_dirty(DFPREG(rd));
4095 break;
4096 case 0x067: /* VIS I fnot2s */
4097 CHECK_FPU_FEATURE(dc, VIS1);
4098 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4099 gen_update_fprs_dirty(rd);
4100 break;
4101 case 0x068: /* VIS I fandnot1 */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4104 cpu_fpr[DFPREG(rs1)]);
4105 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4106 cpu_fpr[DFPREG(rs2) + 1],
4107 cpu_fpr[DFPREG(rs1) + 1]);
4108 gen_update_fprs_dirty(DFPREG(rd));
4109 break;
4110 case 0x069: /* VIS I fandnot1s */
4111 CHECK_FPU_FEATURE(dc, VIS1);
4112 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4113 gen_update_fprs_dirty(rd);
4114 break;
4115 case 0x06a: /* VIS I fnot1 */
4116 CHECK_FPU_FEATURE(dc, VIS1);
4117 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4118 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4119 cpu_fpr[DFPREG(rs1) + 1]);
4120 gen_update_fprs_dirty(DFPREG(rd));
4121 break;
4122 case 0x06b: /* VIS I fnot1s */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4125 gen_update_fprs_dirty(rd);
4126 break;
4127 case 0x06c: /* VIS I fxor */
4128 CHECK_FPU_FEATURE(dc, VIS1);
4129 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4130 cpu_fpr[DFPREG(rs2)]);
4131 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4132 cpu_fpr[DFPREG(rs1) + 1],
4133 cpu_fpr[DFPREG(rs2) + 1]);
4134 gen_update_fprs_dirty(DFPREG(rd));
4135 break;
4136 case 0x06d: /* VIS I fxors */
4137 CHECK_FPU_FEATURE(dc, VIS1);
4138 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4139 gen_update_fprs_dirty(rd);
4140 break;
4141 case 0x06e: /* VIS I fnand */
4142 CHECK_FPU_FEATURE(dc, VIS1);
4143 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4144 cpu_fpr[DFPREG(rs2)]);
4145 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
4146 cpu_fpr[DFPREG(rs1) + 1],
4147 cpu_fpr[DFPREG(rs2) + 1]);
4148 gen_update_fprs_dirty(DFPREG(rd));
4149 break;
4150 case 0x06f: /* VIS I fnands */
4151 CHECK_FPU_FEATURE(dc, VIS1);
4152 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4153 gen_update_fprs_dirty(rd);
4154 break;
4155 case 0x070: /* VIS I fand */
4156 CHECK_FPU_FEATURE(dc, VIS1);
4157 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4158 cpu_fpr[DFPREG(rs2)]);
4159 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4160 cpu_fpr[DFPREG(rs1) + 1],
4161 cpu_fpr[DFPREG(rs2) + 1]);
4162 gen_update_fprs_dirty(DFPREG(rd));
4163 break;
4164 case 0x071: /* VIS I fands */
4165 CHECK_FPU_FEATURE(dc, VIS1);
4166 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4167 gen_update_fprs_dirty(rd);
4168 break;
4169 case 0x072: /* VIS I fxnor */
4170 CHECK_FPU_FEATURE(dc, VIS1);
4171 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4172 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4173 cpu_fpr[DFPREG(rs1)]);
4174 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4175 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4176 cpu_fpr[DFPREG(rs1) + 1]);
4177 gen_update_fprs_dirty(DFPREG(rd));
4178 break;
4179 case 0x073: /* VIS I fxnors */
4180 CHECK_FPU_FEATURE(dc, VIS1);
4181 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4182 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4183 gen_update_fprs_dirty(rd);
4184 break;
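/* Editor's note: fxnor/fxnors above synthesize XNOR as (rs2 ^ -1) ^ rs1,
   i.e. ~(rs1 ^ rs2), staging through cpu_tmp32 so a source register that
   aliases the destination is not clobbered before it is read.  A one-op
   sketch, assuming a TCG version that provides the eqv (xnor) op -- which
   is not guaranteed here:

       tcg_gen_eqv_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
*/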
4185 case 0x074: /* VIS I fsrc1 */
4186 CHECK_FPU_FEATURE(dc, VIS1);
4187 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4188 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4189 cpu_fpr[DFPREG(rs1) + 1]);
4190 gen_update_fprs_dirty(DFPREG(rd));
4191 break;
4192 case 0x075: /* VIS I fsrc1s */
4193 CHECK_FPU_FEATURE(dc, VIS1);
4194 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4195 gen_update_fprs_dirty(rd);
4196 break;
4197 case 0x076: /* VIS I fornot2 */
4198 CHECK_FPU_FEATURE(dc, VIS1);
4199 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4200 cpu_fpr[DFPREG(rs2)]);
4201 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4202 cpu_fpr[DFPREG(rs1) + 1],
4203 cpu_fpr[DFPREG(rs2) + 1]);
4204 gen_update_fprs_dirty(DFPREG(rd));
4205 break;
4206 case 0x077: /* VIS I fornot2s */
4207 CHECK_FPU_FEATURE(dc, VIS1);
4208 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4209 gen_update_fprs_dirty(rd);
4210 break;
4211 case 0x078: /* VIS I fsrc2 */
4212 CHECK_FPU_FEATURE(dc, VIS1);
4213 gen_op_load_fpr_DT0(DFPREG(rs2));
4214 gen_op_store_DT0_fpr(DFPREG(rd));
4215 gen_update_fprs_dirty(DFPREG(rd));
4216 break;
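/* Editor's note: unlike fsrc1 above, which copies the register pair with
   two direct tcg moves, fsrc2 stages the copy through the DT0 load/store
   helpers.  Both forms implement a plain 64-bit register copy; the source
   gives no reason for the asymmetry. */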
4217 case 0x079: /* VIS I fsrc2s */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4220 gen_update_fprs_dirty(rd);
4221 break;
4222 case 0x07a: /* VIS I fornot1 */
4223 CHECK_FPU_FEATURE(dc, VIS1);
4224 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4225 cpu_fpr[DFPREG(rs1)]);
4226 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4227 cpu_fpr[DFPREG(rs2) + 1],
4228 cpu_fpr[DFPREG(rs1) + 1]);
4229 gen_update_fprs_dirty(DFPREG(rd));
4230 break;
4231 case 0x07b: /* VIS I fornot1s */
4232 CHECK_FPU_FEATURE(dc, VIS1);
4233 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4234 gen_update_fprs_dirty(rd);
4235 break;
4236 case 0x07c: /* VIS I for */
4237 CHECK_FPU_FEATURE(dc, VIS1);
4238 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4239 cpu_fpr[DFPREG(rs2)]);
4240 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4241 cpu_fpr[DFPREG(rs1) + 1],
4242 cpu_fpr[DFPREG(rs2) + 1]);
4243 gen_update_fprs_dirty(DFPREG(rd));
4244 break;
4245 case 0x07d: /* VIS I fors */
4246 CHECK_FPU_FEATURE(dc, VIS1);
4247 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4248 gen_update_fprs_dirty(rd);
4249 break;
4250 case 0x07e: /* VIS I fone */
4251 CHECK_FPU_FEATURE(dc, VIS1);
4252 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4253 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4254 gen_update_fprs_dirty(DFPREG(rd));
4255 break;
4256 case 0x07f: /* VIS I fones */
4257 CHECK_FPU_FEATURE(dc, VIS1);
4258 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4259 gen_update_fprs_dirty(rd);
4260 break;
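/* Editor's note: the opf values 0x060-0x07f (the tail of the range is
   shown above) appear to enumerate all 16 two-input boolean functions,
   from fzero through fone.  Even opf values operate on a 64-bit register
   pair via DFPREG(); the odd "s" variants operate on a single 32-bit
   register. */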
4261 case 0x080: /* VIS I shutdown */
4262 case 0x081: /* VIS II siam */
4263 // XXX: not implemented
4264 goto illegal_insn;
4265 default:
4266 goto illegal_insn;
4267 }
4268 #else
4269 goto ncp_insn;
4270 #endif
4271 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4272 #ifdef TARGET_SPARC64
4273 goto illegal_insn;
4274 #else
4275 goto ncp_insn;
4276 #endif
4277 #ifdef TARGET_SPARC64
4278 } else if (xop == 0x39) { /* V9 return */
4279 TCGv_i32 r_const;
4281 save_state(dc, cpu_cond);
4282 cpu_src1 = get_src1(insn, cpu_src1);
4283 if (IS_IMM) { /* immediate */
4284 simm = GET_FIELDs(insn, 19, 31);
4285 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4286 } else { /* register */
4287 rs2 = GET_FIELD(insn, 27, 31);
4288 if (rs2) {
4289 gen_movl_reg_TN(rs2, cpu_src2);
4290 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4291 } else
4292 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4293 }
4294 gen_helper_restore();
4295 gen_mov_pc_npc(dc, cpu_cond);
4296 r_const = tcg_const_i32(3);
4297 gen_helper_check_align(cpu_dst, r_const);
4298 tcg_temp_free_i32(r_const);
4299 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4300 dc->npc = DYNAMIC_PC;
4301 goto jmp_insn;
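/* Editor's note: V9 'return' acts as restore-plus-jump: the register
   window is popped by gen_helper_restore() and control then transfers to
   rs1 + (rs2 | simm13).  The constant 3 is the alignment mask handed to
   the check_align helper; a target address with either low bit set raises
   an alignment trap. */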
4302 #endif
4303 } else {
4304 cpu_src1 = get_src1(insn, cpu_src1);
4305 if (IS_IMM) { /* immediate */
4306 simm = GET_FIELDs(insn, 19, 31);
4307 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4308 } else { /* register */
4309 rs2 = GET_FIELD(insn, 27, 31);
4310 if (rs2) {
4311 gen_movl_reg_TN(rs2, cpu_src2);
4312 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4313 } else
4314 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4315 }
4316 switch (xop) {
4317 case 0x38: /* jmpl */
4318 {
4319 TCGv r_pc;
4320 TCGv_i32 r_const;
4322 r_pc = tcg_const_tl(dc->pc);
4323 gen_movl_TN_reg(rd, r_pc);
4324 tcg_temp_free(r_pc);
4325 gen_mov_pc_npc(dc, cpu_cond);
4326 r_const = tcg_const_i32(3);
4327 gen_helper_check_align(cpu_dst, r_const);
4328 tcg_temp_free_i32(r_const);
4329 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4330 dc->npc = DYNAMIC_PC;
4331 }
4332 goto jmp_insn;
4333 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4334 case 0x39: /* rett, V9 return */
4335 {
4336 TCGv_i32 r_const;
4338 if (!supervisor(dc))
4339 goto priv_insn;
4340 gen_mov_pc_npc(dc, cpu_cond);
4341 r_const = tcg_const_i32(3);
4342 gen_helper_check_align(cpu_dst, r_const);
4343 tcg_temp_free_i32(r_const);
4344 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4345 dc->npc = DYNAMIC_PC;
4346 gen_helper_rett();
4347 }
4348 goto jmp_insn;
4349 #endif
4350 case 0x3b: /* flush */
4351 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4352 goto unimp_flush;
4353 /* nop */
4354 break;
4355 case 0x3c: /* save */
4356 save_state(dc, cpu_cond);
4357 gen_helper_save();
4358 gen_movl_TN_reg(rd, cpu_dst);
4359 break;
4360 case 0x3d: /* restore */
4361 save_state(dc, cpu_cond);
4362 gen_helper_restore();
4363 gen_movl_TN_reg(rd, cpu_dst);
4364 break;
4365 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4366 case 0x3e: /* V9 done/retry */
4367 {
4368 switch (rd) {
4369 case 0:
4370 if (!supervisor(dc))
4371 goto priv_insn;
4372 dc->npc = DYNAMIC_PC;
4373 dc->pc = DYNAMIC_PC;
4374 gen_helper_done();
4375 goto jmp_insn;
4376 case 1:
4377 if (!supervisor(dc))
4378 goto priv_insn;
4379 dc->npc = DYNAMIC_PC;
4380 dc->pc = DYNAMIC_PC;
4381 gen_helper_retry();
4382 goto jmp_insn;
4383 default:
4384 goto illegal_insn;
4385 }
4386 }
4387 break;
4388 #endif
4389 default:
4390 goto illegal_insn;
4391 }
4392 }
4393 break;
4394 }
4395 break;
4396 case 3: /* load/store instructions */
4397 {
4398 unsigned int xop = GET_FIELD(insn, 7, 12);
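/* Editor's note: the effective address built into cpu_addr below is
   rs1 + sign_extend(simm13) in the immediate form, or rs1 + rs2 in the
   register form (rs2 == %g0 degenerates to rs1 alone).  casa/casxa are
   special-cased: they take the address from rs1 only and load rs2 as the
   comparison value. */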
4400 /* flush pending conditional evaluations before exposing
4401 cpu state */
4402 if (dc->cc_op != CC_OP_FLAGS) {
4403 dc->cc_op = CC_OP_FLAGS;
4404 gen_helper_compute_psr();
4405 }
4406 cpu_src1 = get_src1(insn, cpu_src1);
4407 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4408 rs2 = GET_FIELD(insn, 27, 31);
4409 gen_movl_reg_TN(rs2, cpu_src2);
4410 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4411 } else if (IS_IMM) { /* immediate */
4412 simm = GET_FIELDs(insn, 19, 31);
4413 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4414 } else { /* register */
4415 rs2 = GET_FIELD(insn, 27, 31);
4416 if (rs2 != 0) {
4417 gen_movl_reg_TN(rs2, cpu_src2);
4418 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4419 } else
4420 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4421 }
4422 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4423 (xop > 0x17 && xop <= 0x1d ) ||
4424 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4425 switch (xop) {
4426 case 0x0: /* ld, V9 lduw, load unsigned word */
4427 gen_address_mask(dc, cpu_addr);
4428 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4429 break;
4430 case 0x1: /* ldub, load unsigned byte */
4431 gen_address_mask(dc, cpu_addr);
4432 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4433 break;
4434 case 0x2: /* lduh, load unsigned halfword */
4435 gen_address_mask(dc, cpu_addr);
4436 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4437 break;
4438 case 0x3: /* ldd, load double word */
4439 if (rd & 1)
4440 goto illegal_insn;
4441 else {
4442 TCGv_i32 r_const;
4444 save_state(dc, cpu_cond);
4445 r_const = tcg_const_i32(7);
4446 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4447 tcg_temp_free_i32(r_const);
4448 gen_address_mask(dc, cpu_addr);
4449 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4450 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4451 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4452 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4453 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4454 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4455 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4456 }
4457 break;
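/* Editor's note: ldd issues one 64-bit big-endian load and splits it:
   the low 32 bits (the word at addr + 4) land in the odd register rd + 1
   and the high 32 bits (the word at addr) in the even register rd, which
   is why an odd rd is rejected above.  E.g. "ldd [addr], %o0" leaves the
   word at addr in %o0 and the word at addr + 4 in %o1. */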
4458 case 0x9: /* ldsb, load signed byte */
4459 gen_address_mask(dc, cpu_addr);
4460 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4461 break;
4462 case 0xa: /* ldsh, load signed halfword */
4463 gen_address_mask(dc, cpu_addr);
4464 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4465 break;
4466 case 0xd: /* ldstub -- XXX: should be done atomically */
4467 {
4468 TCGv r_const;
4470 gen_address_mask(dc, cpu_addr);
4471 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4472 r_const = tcg_const_tl(0xff);
4473 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4474 tcg_temp_free(r_const);
4475 }
4476 break;
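/* Editor's note: ldstub is the classic SPARC test-and-set primitive: it
   returns the old byte and unconditionally stores 0xff.  The XXX above
   flags that the load and the store are emitted as two separate memory
   operations, so the pair is not atomic with respect to other vCPUs. */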
4477 case 0x0f: /* swap, swap register with memory. Also
4478 should be done atomically */
4479 CHECK_IU_FEATURE(dc, SWAP);
4480 gen_movl_reg_TN(rd, cpu_val);
4481 gen_address_mask(dc, cpu_addr);
4482 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4483 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4484 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4485 break;
4486 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4487 case 0x10: /* lda, V9 lduwa, load word alternate */
4488 #ifndef TARGET_SPARC64
4489 if (IS_IMM)
4490 goto illegal_insn;
4491 if (!supervisor(dc))
4492 goto priv_insn;
4493 #endif
4494 save_state(dc, cpu_cond);
4495 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4496 break;
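/* Editor's note: the #ifndef TARGET_SPARC64 guards repeated in the
   alternate-space cases encode a V8/V9 difference: on sparc32 the ASI
   must come from the instruction's register form and the access is
   privileged, while V9 also allows the immediate form (ASI taken from
   the %asi register) and defines user-accessible ASIs. */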
4497 case 0x11: /* lduba, load unsigned byte alternate */
4498 #ifndef TARGET_SPARC64
4499 if (IS_IMM)
4500 goto illegal_insn;
4501 if (!supervisor(dc))
4502 goto priv_insn;
4503 #endif
4504 save_state(dc, cpu_cond);
4505 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4506 break;
4507 case 0x12: /* lduha, load unsigned halfword alternate */
4508 #ifndef TARGET_SPARC64
4509 if (IS_IMM)
4510 goto illegal_insn;
4511 if (!supervisor(dc))
4512 goto priv_insn;
4513 #endif
4514 save_state(dc, cpu_cond);
4515 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4516 break;
4517 case 0x13: /* ldda, load double word alternate */
4518 #ifndef TARGET_SPARC64
4519 if (IS_IMM)
4520 goto illegal_insn;
4521 if (!supervisor(dc))
4522 goto priv_insn;
4523 #endif
4524 if (rd & 1)
4525 goto illegal_insn;
4526 save_state(dc, cpu_cond);
4527 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4528 goto skip_move;
4529 case 0x19: /* ldsba, load signed byte alternate */
4530 #ifndef TARGET_SPARC64
4531 if (IS_IMM)
4532 goto illegal_insn;
4533 if (!supervisor(dc))
4534 goto priv_insn;
4535 #endif
4536 save_state(dc, cpu_cond);
4537 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4538 break;
4539 case 0x1a: /* ldsha, load signed halfword alternate */
4540 #ifndef TARGET_SPARC64
4541 if (IS_IMM)
4542 goto illegal_insn;
4543 if (!supervisor(dc))
4544 goto priv_insn;
4545 #endif
4546 save_state(dc, cpu_cond);
4547 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4548 break;
4549 case 0x1d: /* ldstuba -- XXX: should be done atomically */
4550 #ifndef TARGET_SPARC64
4551 if (IS_IMM)
4552 goto illegal_insn;
4553 if (!supervisor(dc))
4554 goto priv_insn;
4555 #endif
4556 save_state(dc, cpu_cond);
4557 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4558 break;
4559 case 0x1f: /* swapa, swap reg with alt. memory. Also
4560 should be done atomically */
4561 CHECK_IU_FEATURE(dc, SWAP);
4562 #ifndef TARGET_SPARC64
4563 if (IS_IMM)
4564 goto illegal_insn;
4565 if (!supervisor(dc))
4566 goto priv_insn;
4567 #endif
4568 save_state(dc, cpu_cond);
4569 gen_movl_reg_TN(rd, cpu_val);
4570 gen_swap_asi(cpu_val, cpu_addr, insn);
4571 break;
4573 #ifndef TARGET_SPARC64
4574 case 0x30: /* ldc */
4575 case 0x31: /* ldcsr */
4576 case 0x33: /* lddc */
4577 goto ncp_insn;
4578 #endif
4579 #endif
4580 #ifdef TARGET_SPARC64
4581 case 0x08: /* V9 ldsw */
4582 gen_address_mask(dc, cpu_addr);
4583 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4584 break;
4585 case 0x0b: /* V9 ldx */
4586 gen_address_mask(dc, cpu_addr);
4587 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4588 break;
4589 case 0x18: /* V9 ldswa */
4590 save_state(dc, cpu_cond);
4591 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4592 break;
4593 case 0x1b: /* V9 ldxa */
4594 save_state(dc, cpu_cond);
4595 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4596 break;
4597 case 0x2d: /* V9 prefetch, no effect */
4598 goto skip_move;
4599 case 0x30: /* V9 ldfa */
4600 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4601 goto jmp_insn;
4602 }
4603 save_state(dc, cpu_cond);
4604 gen_ldf_asi(cpu_addr, insn, 4, rd);
4605 gen_update_fprs_dirty(rd);
4606 goto skip_move;
4607 case 0x33: /* V9 lddfa */
4608 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4609 goto jmp_insn;
4610 }
4611 save_state(dc, cpu_cond);
4612 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4613 gen_update_fprs_dirty(DFPREG(rd));
4614 goto skip_move;
4615 case 0x3d: /* V9 prefetcha, no effect */
4616 goto skip_move;
4617 case 0x32: /* V9 ldqfa */
4618 CHECK_FPU_FEATURE(dc, FLOAT128);
4619 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4620 goto jmp_insn;
4621 }
4622 save_state(dc, cpu_cond);
4623 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4624 gen_update_fprs_dirty(QFPREG(rd));
4625 goto skip_move;
4626 #endif
4627 default:
4628 goto illegal_insn;
4629 }
4630 gen_movl_TN_reg(rd, cpu_val);
4631 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4632 skip_move: ;
4633 #endif
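/* Editor's note: skip_move is taken by cases that either write the
   destination themselves (the ASI load helpers) or have no integer
   destination at all (prefetch and the FP ASI loads), bypassing the
   common gen_movl_TN_reg(rd, cpu_val) writeback above. */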
4634 } else if (xop >= 0x20 && xop < 0x24) {
4635 if (gen_trap_ifnofpu(dc, cpu_cond))
4636 goto jmp_insn;
4637 save_state(dc, cpu_cond);
4638 switch (xop) {
4639 case 0x20: /* ldf, load fpreg */
4640 gen_address_mask(dc, cpu_addr);
4641 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4642 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4643 gen_update_fprs_dirty(rd);
4644 break;
4645 case 0x21: /* ldfsr, V9 ldxfsr */
4646 #ifdef TARGET_SPARC64
4647 gen_address_mask(dc, cpu_addr);
4648 if (rd == 1) {
4649 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4650 gen_helper_ldxfsr(cpu_tmp64);
4651 } else {
4652 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4653 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4654 gen_helper_ldfsr(cpu_tmp32);
4655 }
4656 #else
4657 {
4658 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4659 gen_helper_ldfsr(cpu_tmp32);
4660 }
4661 #endif
4662 break;
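/* Editor's note: for opcode 0x21 the rd field doubles as a sub-opcode on
   V9: rd == 1 selects the 64-bit ldxfsr, anything else the 32-bit ldfsr.
   The stfsr case (0x25) below applies the same convention to stxfsr. */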
4663 case 0x22: /* ldqf, load quad fpreg */
4664 {
4665 TCGv_i32 r_const;
4667 CHECK_FPU_FEATURE(dc, FLOAT128);
4668 r_const = tcg_const_i32(dc->mem_idx);
4669 gen_address_mask(dc, cpu_addr);
4670 gen_helper_ldqf(cpu_addr, r_const);
4671 tcg_temp_free_i32(r_const);
4672 gen_op_store_QT0_fpr(QFPREG(rd));
4673 gen_update_fprs_dirty(QFPREG(rd));
4674 }
4675 break;
4676 case 0x23: /* lddf, load double fpreg */
4677 {
4678 TCGv_i32 r_const;
4680 r_const = tcg_const_i32(dc->mem_idx);
4681 gen_address_mask(dc, cpu_addr);
4682 gen_helper_lddf(cpu_addr, r_const);
4683 tcg_temp_free_i32(r_const);
4684 gen_op_store_DT0_fpr(DFPREG(rd));
4685 gen_update_fprs_dirty(DFPREG(rd));
4686 }
4687 break;
4688 default:
4689 goto illegal_insn;
4690 }
4691 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4692 xop == 0xe || xop == 0x1e) {
4693 gen_movl_reg_TN(rd, cpu_val);
4694 switch (xop) {
4695 case 0x4: /* st, store word */
4696 gen_address_mask(dc, cpu_addr);
4697 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4698 break;
4699 case 0x5: /* stb, store byte */
4700 gen_address_mask(dc, cpu_addr);
4701 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4702 break;
4703 case 0x6: /* sth, store halfword */
4704 gen_address_mask(dc, cpu_addr);
4705 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4706 break;
4707 case 0x7: /* std, store double word */
4708 if (rd & 1)
4709 goto illegal_insn;
4710 else {
4711 TCGv_i32 r_const;
4713 save_state(dc, cpu_cond);
4714 gen_address_mask(dc, cpu_addr);
4715 r_const = tcg_const_i32(7);
4716 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4717 tcg_temp_free_i32(r_const);
4718 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4719 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4720 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4721 }
4722 break;
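/* Editor's note: std mirrors ldd: tcg_gen_concat_tl_i64 packs rd + 1
   into the low half and rd into the high half, so the single big-endian
   64-bit store writes rd to addr and rd + 1 to addr + 4. */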
4723 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4724 case 0x14: /* sta, V9 stwa, store word alternate */
4725 #ifndef TARGET_SPARC64
4726 if (IS_IMM)
4727 goto illegal_insn;
4728 if (!supervisor(dc))
4729 goto priv_insn;
4730 #endif
4731 save_state(dc, cpu_cond);
4732 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4733 dc->npc = DYNAMIC_PC;
4734 break;
4735 case 0x15: /* stba, store byte alternate */
4736 #ifndef TARGET_SPARC64
4737 if (IS_IMM)
4738 goto illegal_insn;
4739 if (!supervisor(dc))
4740 goto priv_insn;
4741 #endif
4742 save_state(dc, cpu_cond);
4743 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4744 dc->npc = DYNAMIC_PC;
4745 break;
4746 case 0x16: /* stha, store halfword alternate */
4747 #ifndef TARGET_SPARC64
4748 if (IS_IMM)
4749 goto illegal_insn;
4750 if (!supervisor(dc))
4751 goto priv_insn;
4752 #endif
4753 save_state(dc, cpu_cond);
4754 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4755 dc->npc = DYNAMIC_PC;
4756 break;
4757 case 0x17: /* stda, store double word alternate */
4758 #ifndef TARGET_SPARC64
4759 if (IS_IMM)
4760 goto illegal_insn;
4761 if (!supervisor(dc))
4762 goto priv_insn;
4763 #endif
4764 if (rd & 1)
4765 goto illegal_insn;
4766 else {
4767 save_state(dc, cpu_cond);
4768 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4769 }
4770 break;
4771 #endif
4772 #ifdef TARGET_SPARC64
4773 case 0x0e: /* V9 stx */
4774 gen_address_mask(dc, cpu_addr);
4775 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4776 break;
4777 case 0x1e: /* V9 stxa */
4778 save_state(dc, cpu_cond);
4779 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4780 dc->npc = DYNAMIC_PC;
4781 break;
4782 #endif
4783 default:
4784 goto illegal_insn;
4785 }
4786 } else if (xop > 0x23 && xop < 0x28) {
4787 if (gen_trap_ifnofpu(dc, cpu_cond))
4788 goto jmp_insn;
4789 save_state(dc, cpu_cond);
4790 switch (xop) {
4791 case 0x24: /* stf, store fpreg */
4792 gen_address_mask(dc, cpu_addr);
4793 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4794 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4795 break;
4796 case 0x25: /* stfsr, V9 stxfsr */
4797 #ifdef TARGET_SPARC64
4798 gen_address_mask(dc, cpu_addr);
4799 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4800 if (rd == 1)
4801 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4802 else
4803 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4804 #else
4805 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4806 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4807 #endif
4808 break;
4809 case 0x26:
4810 #ifdef TARGET_SPARC64
4811 /* V9 stqf, store quad fpreg */
4812 {
4813 TCGv_i32 r_const;
4815 CHECK_FPU_FEATURE(dc, FLOAT128);
4816 gen_op_load_fpr_QT0(QFPREG(rd));
4817 r_const = tcg_const_i32(dc->mem_idx);
4818 gen_address_mask(dc, cpu_addr);
4819 gen_helper_stqf(cpu_addr, r_const);
4820 tcg_temp_free_i32(r_const);
4821 }
4822 break;
4823 #else /* !TARGET_SPARC64 */
4824 /* stdfq, store floating point queue */
4825 #if defined(CONFIG_USER_ONLY)
4826 goto illegal_insn;
4827 #else
4828 if (!supervisor(dc))
4829 goto priv_insn;
4830 if (gen_trap_ifnofpu(dc, cpu_cond))
4831 goto jmp_insn;
4832 goto nfq_insn;
4833 #endif
4834 #endif
4835 case 0x27: /* stdf, store double fpreg */
4836 {
4837 TCGv_i32 r_const;
4839 gen_op_load_fpr_DT0(DFPREG(rd));
4840 r_const = tcg_const_i32(dc->mem_idx);
4841 gen_address_mask(dc, cpu_addr);
4842 gen_helper_stdf(cpu_addr, r_const);
4843 tcg_temp_free_i32(r_const);
4844 }
4845 break;
4846 default:
4847 goto illegal_insn;
4848 }
4849 } else if (xop > 0x33 && xop < 0x3f) {
4850 save_state(dc, cpu_cond);
4851 switch (xop) {
4852 #ifdef TARGET_SPARC64
4853 case 0x34: /* V9 stfa */
4854 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4855 goto jmp_insn;
4856 }
4857 gen_stf_asi(cpu_addr, insn, 4, rd);
4858 break;
4859 case 0x36: /* V9 stqfa */
4860 {
4861 TCGv_i32 r_const;
4863 CHECK_FPU_FEATURE(dc, FLOAT128);
4864 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4865 goto jmp_insn;
4866 }
4867 r_const = tcg_const_i32(7);
4868 gen_helper_check_align(cpu_addr, r_const);
4869 tcg_temp_free_i32(r_const);
4870 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4871 }
4872 break;
4873 case 0x37: /* V9 stdfa */
4874 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4875 goto jmp_insn;
4876 }
4877 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4878 break;
4879 case 0x3c: /* V9 casa */
4880 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4881 gen_movl_TN_reg(rd, cpu_val);
4882 break;
4883 case 0x3e: /* V9 casxa */
4884 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4885 gen_movl_TN_reg(rd, cpu_val);
4886 break;
4887 #else
4888 case 0x34: /* stc */
4889 case 0x35: /* stcsr */
4890 case 0x36: /* stdcq */
4891 case 0x37: /* stdc */
4892 goto ncp_insn;
4893 #endif
4894 default:
4895 goto illegal_insn;
4896 }
4897 } else
4898 goto illegal_insn;
4899 }
4900 break;
4901 }
4902 /* default case for non jump instructions */
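/* Editor's note: SPARC delay slots make the translator track a (pc, npc)
   pair.  A constant npc simply advances the pair below; DYNAMIC_PC means
   npc only exists in cpu_npc at runtime, and JUMP_PC means npc is one of
   two known targets of a conditional branch, resolved by gen_branch2. */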
4903 if (dc->npc == DYNAMIC_PC) {
4904 dc->pc = DYNAMIC_PC;
4905 gen_op_next_insn();
4906 } else if (dc->npc == JUMP_PC) {
4907 /* we can do a static jump */
4908 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4909 dc->is_br = 1;
4910 } else {
4911 dc->pc = dc->npc;
4912 dc->npc = dc->npc + 4;
4913 }
4914 jmp_insn:
4915 goto egress;
4916 illegal_insn:
4917 {
4918 TCGv_i32 r_const;
4920 save_state(dc, cpu_cond);
4921 r_const = tcg_const_i32(TT_ILL_INSN);
4922 gen_helper_raise_exception(r_const);
4923 tcg_temp_free_i32(r_const);
4924 dc->is_br = 1;
4925 }
4926 goto egress;
4927 unimp_flush:
4928 {
4929 TCGv_i32 r_const;
4931 save_state(dc, cpu_cond);
4932 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4933 gen_helper_raise_exception(r_const);
4934 tcg_temp_free_i32(r_const);
4935 dc->is_br = 1;
4936 }
4937 goto egress;
4938 #if !defined(CONFIG_USER_ONLY)
4939 priv_insn:
4940 {
4941 TCGv_i32 r_const;
4943 save_state(dc, cpu_cond);
4944 r_const = tcg_const_i32(TT_PRIV_INSN);
4945 gen_helper_raise_exception(r_const);
4946 tcg_temp_free_i32(r_const);
4947 dc->is_br = 1;
4948 }
4949 goto egress;
4950 #endif
4951 nfpu_insn:
4952 save_state(dc, cpu_cond);
4953 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4954 dc->is_br = 1;
4955 goto egress;
4956 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4957 nfq_insn:
4958 save_state(dc, cpu_cond);
4959 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4960 dc->is_br = 1;
4961 goto egress;
4962 #endif
4963 #ifndef TARGET_SPARC64
4964 ncp_insn:
4965 {
4966 TCGv r_const;
4968 save_state(dc, cpu_cond);
4969 r_const = tcg_const_i32(TT_NCP_INSN);
4970 gen_helper_raise_exception(r_const);
4971 tcg_temp_free(r_const);
4972 dc->is_br = 1;
4973 }
4974 goto egress;
4975 #endif
4976 egress:
4977 tcg_temp_free(cpu_tmp1);
4978 tcg_temp_free(cpu_tmp2);
4979 }
4981 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4982 int spc, CPUSPARCState *env)
4983 {
4984 target_ulong pc_start, last_pc;
4985 uint16_t *gen_opc_end;
4986 DisasContext dc1, *dc = &dc1;
4987 CPUBreakpoint *bp;
4988 int j, lj = -1;
4989 int num_insns;
4990 int max_insns;
4992 memset(dc, 0, sizeof(DisasContext));
4993 dc->tb = tb;
4994 pc_start = tb->pc;
4995 dc->pc = pc_start;
4996 last_pc = dc->pc;
4997 dc->npc = (target_ulong) tb->cs_base;
4998 dc->cc_op = CC_OP_DYNAMIC;
4999 dc->mem_idx = cpu_mmu_index(env);
5000 dc->def = env->def;
5001 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5002 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5003 dc->singlestep = (env->singlestep_enabled || singlestep);
5004 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5006 cpu_tmp0 = tcg_temp_new();
5007 cpu_tmp32 = tcg_temp_new_i32();
5008 cpu_tmp64 = tcg_temp_new_i64();
5010 cpu_dst = tcg_temp_local_new();
5012 // loads and stores
5013 cpu_val = tcg_temp_local_new();
5014 cpu_addr = tcg_temp_local_new();
5016 num_insns = 0;
5017 max_insns = tb->cflags & CF_COUNT_MASK;
5018 if (max_insns == 0)
5019 max_insns = CF_COUNT_MASK;
5020 gen_icount_start();
5021 do {
5022 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5023 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5024 if (bp->pc == dc->pc) {
5025 if (dc->pc != pc_start)
5026 save_state(dc, cpu_cond);
5027 gen_helper_debug();
5028 tcg_gen_exit_tb(0);
5029 dc->is_br = 1;
5030 goto exit_gen_loop;
5031 }
5032 }
5033 }
5034 if (spc) {
5035 qemu_log("Search PC...\n");
5036 j = gen_opc_ptr - gen_opc_buf;
5037 if (lj < j) {
5038 lj++;
5039 while (lj < j)
5040 gen_opc_instr_start[lj++] = 0;
5041 gen_opc_pc[lj] = dc->pc;
5042 gen_opc_npc[lj] = dc->npc;
5043 gen_opc_instr_start[lj] = 1;
5044 gen_opc_icount[lj] = num_insns;
5045 }
5046 }
5047 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5048 gen_io_start();
5049 last_pc = dc->pc;
5050 disas_sparc_insn(dc);
5051 num_insns++;
5053 if (dc->is_br)
5054 break;
5055 /* if the next PC is different, we abort now */
5056 if (dc->pc != (last_pc + 4))
5057 break;
5058 /* if we reach a page boundary, we stop generation so that the
5059 PC of a TT_TFAULT exception is always in the right page */
5060 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5061 break;
5062 /* in single-step mode, we generate only one instruction and
5063 then generate an exception */
5064 if (dc->singlestep) {
5065 break;
5066 }
5067 } while ((gen_opc_ptr < gen_opc_end) &&
5068 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5069 num_insns < max_insns);
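/* Editor's note: besides the explicit breaks above, the loop guard stops
   translation when the TCG opcode buffer is nearly full (gen_opc_end),
   when the block approaches the end of its page (TARGET_PAGE_SIZE - 32),
   or when the icount budget max_insns is spent. */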
5071 exit_gen_loop:
5072 tcg_temp_free(cpu_addr);
5073 tcg_temp_free(cpu_val);
5074 tcg_temp_free(cpu_dst);
5075 tcg_temp_free_i64(cpu_tmp64);
5076 tcg_temp_free_i32(cpu_tmp32);
5077 tcg_temp_free(cpu_tmp0);
5078 if (tb->cflags & CF_LAST_IO)
5079 gen_io_end();
5080 if (!dc->is_br) {
5081 if (dc->pc != DYNAMIC_PC &&
5082 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5083 /* static PC and NPC: we can use direct chaining */
5084 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5085 } else {
5086 if (dc->pc != DYNAMIC_PC)
5087 tcg_gen_movi_tl(cpu_pc, dc->pc);
5088 save_npc(dc, cpu_cond);
5089 tcg_gen_exit_tb(0);
5090 }
5091 }
5092 gen_icount_end(tb, num_insns);
5093 *gen_opc_ptr = INDEX_op_end;
5094 if (spc) {
5095 j = gen_opc_ptr - gen_opc_buf;
5096 lj++;
5097 while (lj <= j)
5098 gen_opc_instr_start[lj++] = 0;
5099 #if 0
5100 log_page_dump();
5101 #endif
5102 gen_opc_jump_pc[0] = dc->jump_pc[0];
5103 gen_opc_jump_pc[1] = dc->jump_pc[1];
5104 } else {
5105 tb->size = last_pc + 4 - pc_start;
5106 tb->icount = num_insns;
5107 }
5108 #ifdef DEBUG_DISAS
5109 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5110 qemu_log("--------------\n");
5111 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5112 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5113 qemu_log("\n");
5114 }
5115 #endif
5116 }
5118 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5119 {
5120 gen_intermediate_code_internal(tb, 0, env);
5121 }
5123 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5124 {
5125 gen_intermediate_code_internal(tb, 1, env);
5126 }
5128 void gen_intermediate_code_init(CPUSPARCState *env)
5129 {
5130 unsigned int i;
5131 static int inited;
5132 static const char * const gregnames[8] = {
5133 NULL, // g0 not used
5134 "g1",
5135 "g2",
5136 "g3",
5137 "g4",
5138 "g5",
5139 "g6",
5140 "g7",
5142 static const char * const fregnames[64] = {
5143 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5144 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5145 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5146 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5147 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5148 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5149 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5150 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5153 /* init various static tables */
5154 if (!inited) {
5155 inited = 1;
5157 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5158 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5159 offsetof(CPUState, regwptr),
5160 "regwptr");
5161 #ifdef TARGET_SPARC64
5162 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5163 "xcc");
5164 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5165 "asi");
5166 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5167 "fprs");
5168 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5169 "gsr");
5170 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5171 offsetof(CPUState, tick_cmpr),
5172 "tick_cmpr");
5173 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5174 offsetof(CPUState, stick_cmpr),
5175 "stick_cmpr");
5176 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5177 offsetof(CPUState, hstick_cmpr),
5178 "hstick_cmpr");
5179 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5180 "hintp");
5181 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5182 "htba");
5183 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5184 "hver");
5185 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5186 offsetof(CPUState, ssr), "ssr");
5187 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5188 offsetof(CPUState, version), "ver");
5189 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5190 offsetof(CPUState, softint),
5191 "softint");
5192 #else
5193 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5194 "wim");
5195 #endif
5196 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5197 "cond");
5198 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5199 "cc_src");
5200 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5201 offsetof(CPUState, cc_src2),
5202 "cc_src2");
5203 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5204 "cc_dst");
5205 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5206 "cc_op");
5207 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5208 "psr");
5209 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5210 "fsr");
5211 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5212 "pc");
5213 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5214 "npc");
5215 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5216 #ifndef CONFIG_USER_ONLY
5217 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5218 "tbr");
5219 #endif
5220 for (i = 1; i < 8; i++)
5221 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5222 offsetof(CPUState, gregs[i]),
5223 gregnames[i]);
5224 for (i = 0; i < TARGET_FPREGS; i++)
5225 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5226 offsetof(CPUState, fpr[i]),
5227 fregnames[i]);
5229 /* register helpers */
5231 #define GEN_HELPER 2
5232 #include "helper.h"
5233 }
5234 }
5236 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5237 {
5238 target_ulong npc;
5239 env->pc = gen_opc_pc[pc_pos];
5240 npc = gen_opc_npc[pc_pos];
5241 if (npc == 1) {
5242 /* dynamic NPC: already stored */
5243 } else if (npc == 2) {
5244 /* jump PC: use 'cond' and the jump targets of the translation */
5245 if (env->cond) {
5246 env->npc = gen_opc_jump_pc[0];
5247 } else {
5248 env->npc = gen_opc_jump_pc[1];
5249 }
5250 } else {
5251 env->npc = npc;
5252 }
5254 /* flush pending conditional evaluations before exposing cpu state */
5255 if (CC_OP != CC_OP_FLAGS) {
5256 helper_compute_psr();