vmware-vga: Register reset service
[qemu/wangdongxu.git] / target-sparc / translate.c
blobdee67b334f7209f99659cca82a62d1718edd778e
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
32 #define GEN_HELPER 1
33 #include "helper.h"
35 #define DEBUG_DISAS
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77 int is_br;
78 int mem_idx;
79 int fpu_enabled;
80 int address_mask_32bit;
81 int singlestep;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def;
85 } DisasContext;
87 // This function uses non-native bit order
88 #define GET_FIELD(X, FROM, TO) \
89 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
91 // This function uses the order in the manuals, i.e. bit 0 is 2^0
92 #define GET_FIELD_SP(X, FROM, TO) \
93 GET_FIELD(X, 31 - (TO), 31 - (FROM))
95 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
98 #ifdef TARGET_SPARC64
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101 #else
102 #define DFPREG(r) (r & 0x1e)
103 #define QFPREG(r) (r & 0x1c)
104 #endif
106 #define UA2005_HTRAP_MASK 0xff
107 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low LEN bits of X to a full signed 32-bit value.
 * Used by the GET_FIELDs/GET_FIELD_SPs macros to decode signed
 * immediates out of SPARC instruction words.
 *
 * The shift up to bit 31 is performed on an unsigned value because
 * left-shifting a negative signed int is undefined behaviour in C;
 * the arithmetic right shift then replicates the field's sign bit.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int32_t)((uint32_t)x << len)) >> len;
}
115 #define IS_IMM (insn & (1<<13))
117 /* floating point registers moves */
/* Copy the 32-bit FP register pair src/src+1 into the env DT0 staging
   slot (upper word first) for use by double-precision helpers. */
118 static void gen_op_load_fpr_DT0(unsigned int src)
120     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
121                    offsetof(CPU_DoubleU, l.upper));
122     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
123                    offsetof(CPU_DoubleU, l.lower));
/* Same as gen_op_load_fpr_DT0 but targets the DT1 staging slot. */
126 static void gen_op_load_fpr_DT1(unsigned int src)
128     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
129                    offsetof(CPU_DoubleU, l.upper));
130     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
131                    offsetof(CPU_DoubleU, l.lower));
/* Copy the env DT0 staging slot back into FP register pair dst/dst+1
   (inverse of gen_op_load_fpr_DT0). */
134 static void gen_op_store_DT0_fpr(unsigned int dst)
136     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
137                    offsetof(CPU_DoubleU, l.upper));
138     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
139                    offsetof(CPU_DoubleU, l.lower));
/* Copy the quad-precision group src..src+3 into the env QT0 staging
   slot, most-significant word (upmost) first. */
142 static void gen_op_load_fpr_QT0(unsigned int src)
144     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
145                    offsetof(CPU_QuadU, l.upmost));
146     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
147                    offsetof(CPU_QuadU, l.upper));
148     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
149                    offsetof(CPU_QuadU, l.lower));
150     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
151                    offsetof(CPU_QuadU, l.lowest));
/* Same as gen_op_load_fpr_QT0 but targets the QT1 staging slot. */
154 static void gen_op_load_fpr_QT1(unsigned int src)
156     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
157                    offsetof(CPU_QuadU, l.upmost));
158     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
159                    offsetof(CPU_QuadU, l.upper));
160     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
161                    offsetof(CPU_QuadU, l.lower));
162     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
163                    offsetof(CPU_QuadU, l.lowest));
/* Copy the env QT0 staging slot back into the quad-precision register
   group dst..dst+3 (inverse of gen_op_load_fpr_QT0). */
166 static void gen_op_store_QT0_fpr(unsigned int dst)
168     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
169                    offsetof(CPU_QuadU, l.upmost));
170     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
171                    offsetof(CPU_QuadU, l.upper));
172     tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
173                    offsetof(CPU_QuadU, l.lower));
174     tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
175                    offsetof(CPU_QuadU, l.lowest));
178 /* moves */
179 #ifdef CONFIG_USER_ONLY
180 #define supervisor(dc) 0
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) 0
183 #endif
184 #else
185 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
186 #ifdef TARGET_SPARC64
187 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
188 #else
189 #endif
190 #endif
192 #ifdef TARGET_SPARC64
193 #ifndef TARGET_ABI32
194 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
195 #else
196 #define AM_CHECK(dc) (1)
197 #endif
198 #endif
/* On sparc64 with PSTATE.AM in effect (AM_CHECK), truncate a memory
   address to 32 bits in place; a no-op on 32-bit targets. */
200 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
202 #ifdef TARGET_SPARC64
203     if (AM_CHECK(dc))
204         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205 #endif
/* Load integer register %reg into TCG temp tn: %g0 reads as zero,
   globals live in cpu_gregs[], windowed regs are loaded via regwptr. */
208 static inline void gen_movl_reg_TN(int reg, TCGv tn)
210     if (reg == 0)
211         tcg_gen_movi_tl(tn, 0);
212     else if (reg < 8)
213         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
214     else {
215         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Store TCG temp tn into integer register %reg; writes to %g0 are
   discarded, mirroring gen_movl_reg_TN. */
219 static inline void gen_movl_TN_reg(int reg, TCGv tn)
221     if (reg == 0)
222         return;
223     else if (reg < 8)
224         tcg_gen_mov_tl(cpu_gregs[reg], tn);
225     else {
226         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
/* Emit a jump to (pc, npc).  If both targets stay on the same guest
   page as this TB and we are not single-stepping, use a direct
   (chainable) tcg_gen_goto_tb exit; otherwise fall back to a plain
   exit so the new PC is looked up at run time. */
230 static inline void gen_goto_tb(DisasContext *s, int tb_num,
231                                target_ulong pc, target_ulong npc)
233     TranslationBlock *tb;
235     tb = s->tb;
236     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
237         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
238         !s->singlestep) {
239         /* jump to same page: we can use a direct jump */
240         tcg_gen_goto_tb(tb_num);
241         tcg_gen_movi_tl(cpu_pc, pc);
242         tcg_gen_movi_tl(cpu_npc, npc);
            /* exit value encodes TB pointer + slot for chaining */
243         tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
244     } else {
245         /* jump to another page: currently not optimized */
246         tcg_gen_movi_tl(cpu_pc, pc);
247         tcg_gen_movi_tl(cpu_npc, npc);
248         tcg_gen_exit_tb(0);
252 // XXX suboptimal
/* Extract the PSR Negative flag (bit PSR_NEG_SHIFT of src) into reg as 0/1. */
253 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
255     tcg_gen_extu_i32_tl(reg, src);
256     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
257     tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract the PSR Zero flag into reg as 0/1. */
260 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
262     tcg_gen_extu_i32_tl(reg, src);
263     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
264     tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract the PSR Overflow flag into reg as 0/1. */
267 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
269     tcg_gen_extu_i32_tl(reg, src);
270     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
271     tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract the PSR Carry flag into reg as 0/1. */
274 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
276     tcg_gen_extu_i32_tl(reg, src);
277     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
278     tcg_gen_andi_tl(reg, reg, 0x1);
/* Raise a TT_TOVF trap if the 32-bit signed addition dst = src1 + src2
   overflowed.  Overflow iff both operands have the same sign and the
   result's sign differs: ~(src1^src2) & (src1^dst), tested at bit 31.
   Used by the tagged-arithmetic (TADDccTV) path. */
281 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
283     TCGv r_temp;
284     TCGv_i32 r_const;
285     int l1;
287     l1 = gen_new_label();
289     r_temp = tcg_temp_new();
290     tcg_gen_xor_tl(r_temp, src1, src2);
291     tcg_gen_not_tl(r_temp, r_temp);
292     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
293     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
294     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
295     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
296     r_const = tcg_const_i32(TT_TOVF);
297     gen_helper_raise_exception(r_const);
298     tcg_temp_free_i32(r_const);
299     gen_set_label(l1);
300     tcg_temp_free(r_temp);
/* Raise a TT_TOVF trap unless both operands have clear tag bits
   (low two bits zero), as required by tagged add/subtract. */
303 static inline void gen_tag_tv(TCGv src1, TCGv src2)
305     int l1;
306     TCGv_i32 r_const;
308     l1 = gen_new_label();
309     tcg_gen_or_tl(cpu_tmp0, src1, src2);
310     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
311     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
312     r_const = tcg_const_i32(TT_TOVF);
313     gen_helper_raise_exception(r_const);
314     tcg_temp_free_i32(r_const);
315     gen_set_label(l1);
/* dst = src1 + imm, recording operands and result in the cc_* globals
   so flags can be computed lazily later. */
318 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
320     tcg_gen_mov_tl(cpu_cc_src, src1);
321     tcg_gen_movi_tl(cpu_cc_src2, src2);
322     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
323     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 + src2, recording operands and result for lazy flag
   computation (register-register form of gen_op_addi_cc). */
326 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
328     tcg_gen_mov_tl(cpu_cc_src, src1);
329     tcg_gen_mov_tl(cpu_cc_src2, src2);
330     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
331     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Return a fresh i32 temp holding the 32-bit carry out of the previous
   addition recorded in cc_dst/cc_src: carry = (dst < src), computed as
   an unsigned setcond.  On 64-bit targets the values are truncated to
   32 bits first.  Caller frees the returned temp. */
334 static TCGv_i32 gen_add32_carry32(void)
336     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
338     /* Carry is computed from a previous add: (dst < src)  */
339 #if TARGET_LONG_BITS == 64
340     cc_src1_32 = tcg_temp_new_i32();
341     cc_src2_32 = tcg_temp_new_i32();
342     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
343     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
344 #else
345     cc_src1_32 = cpu_cc_dst;
346     cc_src2_32 = cpu_cc_src;
347 #endif
349     carry_32 = tcg_temp_new_i32();
350     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
352 #if TARGET_LONG_BITS == 64
353     tcg_temp_free_i32(cc_src1_32);
354     tcg_temp_free_i32(cc_src2_32);
355 #endif
357     return carry_32;
/* Return a fresh i32 temp holding the 32-bit borrow out of the
   previous subtraction recorded in cc_src/cc_src2:
   borrow = (src1 < src2).  Caller frees the returned temp. */
360 static TCGv_i32 gen_sub32_carry32(void)
362     TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
364     /* Carry is computed from a previous borrow: (src1 < src2)  */
365 #if TARGET_LONG_BITS == 64
366     cc_src1_32 = tcg_temp_new_i32();
367     cc_src2_32 = tcg_temp_new_i32();
368     tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
369     tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
370 #else
371     cc_src1_32 = cpu_cc_src;
372     cc_src2_32 = cpu_cc_src2;
373 #endif
375     carry_32 = tcg_temp_new_i32();
376     tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
378 #if TARGET_LONG_BITS == 64
379     tcg_temp_free_i32(cc_src1_32);
380     tcg_temp_free_i32(cc_src2_32);
381 #endif
383     return carry_32;
/* Implement ADDX/ADDXcc: dst = src1 + src2 + icc.C.  The way the carry
   is recovered depends on how the flags were last set (dc->cc_op):
   known-zero (DIV/LOGIC), derivable from a prior add or sub, or — in
   the general case — computed by the compute_C_icc helper.  When
   update_cc is set, operands and result are saved and cc_op becomes
   CC_OP_ADDX for later lazy flag evaluation. */
386 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
387                             TCGv src2, int update_cc)
389     TCGv_i32 carry_32;
390     TCGv carry;
392     switch (dc->cc_op) {
393     case CC_OP_DIV:
394     case CC_OP_LOGIC:
395         /* Carry is known to be zero.  Fall back to plain ADD.  */
396         if (update_cc) {
397             gen_op_add_cc(dst, src1, src2);
398         } else {
399             tcg_gen_add_tl(dst, src1, src2);
401         return;
403     case CC_OP_ADD:
404     case CC_OP_TADD:
405     case CC_OP_TADDTV:
406 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
408         /* For 32-bit hosts, we can re-use the host's hardware carry
409            generation by using an ADD2 opcode.  We discard the low
410            part of the output.  Ideally we'd combine this operation
411            with the add that generated the carry in the first place.  */
412         TCGv dst_low = tcg_temp_new();
413         tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
414                         cpu_cc_src, src1, cpu_cc_src2, src2);
415         tcg_temp_free(dst_low);
416         goto add_done;
418 #endif
419         carry_32 = gen_add32_carry32();
420         break;
422     case CC_OP_SUB:
423     case CC_OP_TSUB:
424     case CC_OP_TSUBTV:
425         carry_32 = gen_sub32_carry32();
426         break;
428     default:
429         /* We need external help to produce the carry.  */
430         carry_32 = tcg_temp_new_i32();
431         gen_helper_compute_C_icc(carry_32);
432         break;
435 #if TARGET_LONG_BITS == 64
436     carry = tcg_temp_new();
437     tcg_gen_extu_i32_i64(carry, carry_32);
438 #else
439     carry = carry_32;
440 #endif
442     tcg_gen_add_tl(dst, src1, src2);
443     tcg_gen_add_tl(dst, dst, carry);
445     tcg_temp_free_i32(carry_32);
446 #if TARGET_LONG_BITS == 64
447     tcg_temp_free(carry);
448 #endif
450 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
451  add_done:
452 #endif
453     if (update_cc) {
454         tcg_gen_mov_tl(cpu_cc_src, src1);
455         tcg_gen_mov_tl(cpu_cc_src2, src2);
456         tcg_gen_mov_tl(cpu_cc_dst, dst);
457         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
458         dc->cc_op = CC_OP_ADDX;
/* Tagged add (TADDcc): same data flow as gen_op_add_cc; tag-overflow
   detection is handled via the lazily-evaluated cc state. */
462 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
464     tcg_gen_mov_tl(cpu_cc_src, src1);
465     tcg_gen_mov_tl(cpu_cc_src2, src2);
466     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
467     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* TADDccTV: tagged add that traps — gen_tag_tv traps on set tag bits
   before the add, gen_add_tv traps on signed overflow after it. */
470 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
472     tcg_gen_mov_tl(cpu_cc_src, src1);
473     tcg_gen_mov_tl(cpu_cc_src2, src2);
474     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
475     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
476     gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Raise a TT_TOVF trap if the 32-bit signed subtraction
   dst = src1 - src2 overflowed: (src1^src2) & (src1^dst) at bit 31.
   Counterpart of gen_add_tv for the TSUBccTV path. */
480 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
482     TCGv r_temp;
483     TCGv_i32 r_const;
484     int l1;
486     l1 = gen_new_label();
488     r_temp = tcg_temp_new();
489     tcg_gen_xor_tl(r_temp, src1, src2);
490     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
491     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
492     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
493     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
494     r_const = tcg_const_i32(TT_TOVF);
495     gen_helper_raise_exception(r_const);
496     tcg_temp_free_i32(r_const);
497     gen_set_label(l1);
498     tcg_temp_free(r_temp);
/* dst = src1 - imm with flag bookkeeping.  A zero immediate degrades
   to a move, so the cheaper CC_OP_LOGIC flag mode can be used instead
   of CC_OP_SUB; dc->cc_op is kept in sync with the cpu_cc_op global. */
501 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
503     tcg_gen_mov_tl(cpu_cc_src, src1);
504     tcg_gen_movi_tl(cpu_cc_src2, src2);
505     if (src2 == 0) {
506         tcg_gen_mov_tl(cpu_cc_dst, src1);
507         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
508         dc->cc_op = CC_OP_LOGIC;
509     } else {
510         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
511         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
512         dc->cc_op = CC_OP_SUB;
514     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* dst = src1 - src2, recording operands and result for lazy flag
   computation. */
517 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
519     tcg_gen_mov_tl(cpu_cc_src, src1);
520     tcg_gen_mov_tl(cpu_cc_src2, src2);
521     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
522     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* Implement SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirrors
   gen_op_addx_int: the borrow is recovered from the recorded cc state
   when possible (known zero, prior add, prior sub — with a SUB2
   fast path on 32-bit hosts), else via the compute_C_icc helper.
   When update_cc is set, cc state is saved and cc_op becomes
   CC_OP_SUBX. */
525 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
526                             TCGv src2, int update_cc)
528     TCGv_i32 carry_32;
529     TCGv carry;
531     switch (dc->cc_op) {
532     case CC_OP_DIV:
533     case CC_OP_LOGIC:
534         /* Carry is known to be zero.  Fall back to plain SUB.  */
535         if (update_cc) {
536             gen_op_sub_cc(dst, src1, src2);
537         } else {
538             tcg_gen_sub_tl(dst, src1, src2);
540         return;
542     case CC_OP_ADD:
543     case CC_OP_TADD:
544     case CC_OP_TADDTV:
545         carry_32 = gen_add32_carry32();
546         break;
548     case CC_OP_SUB:
549     case CC_OP_TSUB:
550     case CC_OP_TSUBTV:
551 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
553         /* For 32-bit hosts, we can re-use the host's hardware carry
554            generation by using a SUB2 opcode.  We discard the low
555            part of the output.  Ideally we'd combine this operation
556            with the add that generated the carry in the first place.  */
557         TCGv dst_low = tcg_temp_new();
558         tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
559                         cpu_cc_src, src1, cpu_cc_src2, src2);
560         tcg_temp_free(dst_low);
561         goto sub_done;
563 #endif
564         carry_32 = gen_sub32_carry32();
565         break;
567     default:
568         /* We need external help to produce the carry.  */
569         carry_32 = tcg_temp_new_i32();
570         gen_helper_compute_C_icc(carry_32);
571         break;
574 #if TARGET_LONG_BITS == 64
575     carry = tcg_temp_new();
576     tcg_gen_extu_i32_i64(carry, carry_32);
577 #else
578     carry = carry_32;
579 #endif
581     tcg_gen_sub_tl(dst, src1, src2);
582     tcg_gen_sub_tl(dst, dst, carry);
584     tcg_temp_free_i32(carry_32);
585 #if TARGET_LONG_BITS == 64
586     tcg_temp_free(carry);
587 #endif
589 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
590  sub_done:
591 #endif
592     if (update_cc) {
593         tcg_gen_mov_tl(cpu_cc_src, src1);
594         tcg_gen_mov_tl(cpu_cc_src2, src2);
595         tcg_gen_mov_tl(cpu_cc_dst, dst);
596         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
597         dc->cc_op = CC_OP_SUBX;
/* Tagged subtract (TSUBcc): same data flow as gen_op_sub_cc. */
601 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
603     tcg_gen_mov_tl(cpu_cc_src, src1);
604     tcg_gen_mov_tl(cpu_cc_src2, src2);
605     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
606     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* TSUBccTV: tagged subtract that traps on set tag bits (gen_tag_tv)
   and on signed overflow (gen_sub_tv). */
609 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
611     tcg_gen_mov_tl(cpu_cc_src, src1);
612     tcg_gen_mov_tl(cpu_cc_src2, src2);
613     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
614     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
615     gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
616     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* MULScc — one step of the SPARC V8 iterative multiply.  If Y's LSB is
   clear the addend (src2) is zeroed; Y is then shifted right with the
   multiplicand's LSB inserted at bit 31; src1 is shifted right with
   (N ^ V) inserted at bit 31; finally the two values are added with
   flag bookkeeping. */
619 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
621     TCGv r_temp;
622     int l1;
624     l1 = gen_new_label();
625     r_temp = tcg_temp_new();
627     /* old op:
628     if (!(env->y & 1))
629         T1 = 0;
631     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
632     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
633     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
634     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
635     tcg_gen_movi_tl(cpu_cc_src2, 0);
636     gen_set_label(l1);
638     // b2 = T0 & 1;
639     // env->y = (b2 << 31) | (env->y >> 1);
640     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
641     tcg_gen_shli_tl(r_temp, r_temp, 31);
642     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
643     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
644     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
645     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
647     // b1 = N ^ V;
648     gen_mov_reg_N(cpu_tmp0, cpu_psr);
649     gen_mov_reg_V(r_temp, cpu_psr);
650     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
651     tcg_temp_free(r_temp);
653     // T0 = (b1 << 31) | (T0 >> 1);
654     // src1 = T0;
655     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
656     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
657     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
659     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
661     tcg_gen_mov_tl(dst, cpu_cc_dst);
/* 32x32 -> 64-bit multiply for UMUL/SMUL.  Operands are truncated to
   32 bits, widened (sign- or zero-extended per sign_ext), multiplied
   as i64; the high 32 bits go to the Y register, the low word to dst. */
664 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
666     TCGv_i32 r_src1, r_src2;
667     TCGv_i64 r_temp, r_temp2;
669     r_src1 = tcg_temp_new_i32();
670     r_src2 = tcg_temp_new_i32();
672     tcg_gen_trunc_tl_i32(r_src1, src1);
673     tcg_gen_trunc_tl_i32(r_src2, src2);
675     r_temp = tcg_temp_new_i64();
676     r_temp2 = tcg_temp_new_i64();
678     if (sign_ext) {
679         tcg_gen_ext_i32_i64(r_temp, r_src2);
680         tcg_gen_ext_i32_i64(r_temp2, r_src1);
681     } else {
682         tcg_gen_extu_i32_i64(r_temp, r_src2);
683         tcg_gen_extu_i32_i64(r_temp2, r_src1);
686     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
688     tcg_gen_shri_i64(r_temp, r_temp2, 32);
689     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
690     tcg_temp_free_i64(r_temp);
691     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
693     tcg_gen_trunc_i64_tl(dst, r_temp2);
695     tcg_temp_free_i64(r_temp2);
697     tcg_temp_free_i32(r_src1);
698     tcg_temp_free_i32(r_src2);
/* UMUL: unsigned 32x32->64 multiply via gen_op_multiply. */
701 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
703     /* zero-extend truncated operands before multiplication */
704     gen_op_multiply(dst, src1, src2, 0);
/* SMUL: signed 32x32->64 multiply via gen_op_multiply. */
707 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
709     /* sign-extend truncated operands before multiplication */
710     gen_op_multiply(dst, src1, src2, 1);
713 #ifdef TARGET_SPARC64
/* Raise a TT_DIV_ZERO trap if the divisor is zero (sparc64 only). */
714 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
716     TCGv_i32 r_const;
717     int l1;
719     l1 = gen_new_label();
720     tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
721     r_const = tcg_const_i32(TT_DIV_ZERO);
722     gen_helper_raise_exception(r_const);
723     tcg_temp_free_i32(r_const);
724     gen_set_label(l1);
/* SDIVX: signed 64-bit divide.  Traps on divide-by-zero, and handles
   the one overflow case INT64_MIN / -1 (whose quotient does not fit)
   by producing INT64_MIN instead of executing the division.  Local
   temps are used because the values live across branches. */
727 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
729     int l1, l2;
730     TCGv r_temp1, r_temp2;
732     l1 = gen_new_label();
733     l2 = gen_new_label();
734     r_temp1 = tcg_temp_local_new();
735     r_temp2 = tcg_temp_local_new();
736     tcg_gen_mov_tl(r_temp1, src1);
737     tcg_gen_mov_tl(r_temp2, src2);
738     gen_trap_ifdivzero_tl(r_temp2);
739     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
740     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
741     tcg_gen_movi_i64(dst, INT64_MIN);
742     tcg_gen_br(l2);
743     gen_set_label(l1);
744     tcg_gen_div_i64(dst, r_temp1, r_temp2);
745     gen_set_label(l2);
746     tcg_temp_free(r_temp1);
747     tcg_temp_free(r_temp2);
749 #endif
751 // 1
/* Branch Always: condition value is constant 1. */
752 static inline void gen_op_eval_ba(TCGv dst)
754     tcg_gen_movi_tl(dst, 1);
757 // Z
/* Branch on Equal: dst = Z. */
758 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
760     gen_mov_reg_Z(dst, src);
763 // Z | (N ^ V)
/* Branch on Less or Equal: dst = Z | (N ^ V). */
764 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
766     gen_mov_reg_N(cpu_tmp0, src);
767     gen_mov_reg_V(dst, src);
768     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
769     gen_mov_reg_Z(cpu_tmp0, src);
770     tcg_gen_or_tl(dst, dst, cpu_tmp0);
773 // N ^ V
/* Branch on Less: dst = N ^ V. */
774 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
776     gen_mov_reg_V(cpu_tmp0, src);
777     gen_mov_reg_N(dst, src);
778     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
781 // C | Z
/* Branch on Less or Equal Unsigned: dst = C | Z. */
782 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
784     gen_mov_reg_Z(cpu_tmp0, src);
785     gen_mov_reg_C(dst, src);
786     tcg_gen_or_tl(dst, dst, cpu_tmp0);
789 // C
/* Branch on Carry Set: dst = C. */
790 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
792     gen_mov_reg_C(dst, src)
795 // V
/* Branch on Overflow Set: dst = V. */
796 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
798     gen_mov_reg_V(dst, src);
801 // 0
/* Branch Never: condition value is constant 0. */
802 static inline void gen_op_eval_bn(TCGv dst)
804     tcg_gen_movi_tl(dst, 0);
807 // N
/* Branch on Negative: dst = N. */
808 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
810     gen_mov_reg_N(dst, src);
813 // !Z
/* Branch on Not Equal: dst = !Z. */
814 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
816     gen_mov_reg_Z(dst, src);
817     tcg_gen_xori_tl(dst, dst, 0x1);
820 // !(Z | (N ^ V))
/* Branch on Greater: dst = !(Z | (N ^ V)). */
821 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
823     gen_mov_reg_N(cpu_tmp0, src);
824     gen_mov_reg_V(dst, src);
825     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
826     gen_mov_reg_Z(cpu_tmp0, src);
827     tcg_gen_or_tl(dst, dst, cpu_tmp0);
828     tcg_gen_xori_tl(dst, dst, 0x1);
831 // !(N ^ V)
/* Branch on Greater or Equal: dst = !(N ^ V). */
832 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
834     gen_mov_reg_V(cpu_tmp0, src);
835     gen_mov_reg_N(dst, src);
836     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
837     tcg_gen_xori_tl(dst, dst, 0x1);
840 // !(C | Z)
/* Branch on Greater Unsigned: dst = !(C | Z). */
841 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
843     gen_mov_reg_Z(cpu_tmp0, src);
844     gen_mov_reg_C(dst, src);
845     tcg_gen_or_tl(dst, dst, cpu_tmp0);
846     tcg_gen_xori_tl(dst, dst, 0x1);
849 // !C
/* Branch on Carry Clear: dst = !C. */
850 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
852     gen_mov_reg_C(dst, src);
853     tcg_gen_xori_tl(dst, dst, 0x1);
856 // !N
/* Branch on Positive: dst = !N. */
857 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
859     gen_mov_reg_N(dst, src);
860     tcg_gen_xori_tl(dst, dst, 0x1);
863 // !V
/* Branch on Overflow Clear: dst = !V. */
864 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
866     gen_mov_reg_V(dst, src);
867     tcg_gen_xori_tl(dst, dst, 0x1);
871 FPSR bit field FCC1 | FCC0:
875 3 unordered
/* Extract FCC0 (low bit of the selected fcc field of the FSR) into reg
   as 0/1; fcc_offset selects which of the fcc0..fcc3 fields. */
877 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
878                                     unsigned int fcc_offset)
880     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
881     tcg_gen_andi_tl(reg, reg, 0x1);
/* Extract FCC1 (high bit of the selected fcc field) into reg as 0/1. */
884 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
885                                     unsigned int fcc_offset)
887     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
888     tcg_gen_andi_tl(reg, reg, 0x1);
891 // !0: FCC0 | FCC1
/* FBne — fcc != 0: dst = FCC0 | FCC1. */
892 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
893                                     unsigned int fcc_offset)
895     gen_mov_reg_FCC0(dst, src, fcc_offset);
896     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
897     tcg_gen_or_tl(dst, dst, cpu_tmp0);
900 // 1 or 2: FCC0 ^ FCC1
/* FBlg — fcc is 1 or 2: dst = FCC0 ^ FCC1. */
901 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
902                                     unsigned int fcc_offset)
904     gen_mov_reg_FCC0(dst, src, fcc_offset);
905     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
906     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
909 // 1 or 3: FCC0
/* FBul — fcc is 1 or 3: dst = FCC0. */
910 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
911                                     unsigned int fcc_offset)
913     gen_mov_reg_FCC0(dst, src, fcc_offset);
916 // 1: FCC0 & !FCC1
/* FBl — fcc == 1: dst = FCC0 & !FCC1. */
917 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
918                                    unsigned int fcc_offset)
920     gen_mov_reg_FCC0(dst, src, fcc_offset);
921     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
922     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
923     tcg_gen_and_tl(dst, dst, cpu_tmp0);
926 // 2 or 3: FCC1
/* FBug — fcc is 2 or 3: dst = FCC1. */
927 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
928                                     unsigned int fcc_offset)
930     gen_mov_reg_FCC1(dst, src, fcc_offset);
933 // 2: !FCC0 & FCC1
/* FBg — fcc == 2: dst = !FCC0 & FCC1. */
934 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
935                                    unsigned int fcc_offset)
937     gen_mov_reg_FCC0(dst, src, fcc_offset);
938     tcg_gen_xori_tl(dst, dst, 0x1);
939     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
940     tcg_gen_and_tl(dst, dst, cpu_tmp0);
943 // 3: FCC0 & FCC1
/* FBu — fcc == 3 (unordered): dst = FCC0 & FCC1. */
944 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
945                                    unsigned int fcc_offset)
947     gen_mov_reg_FCC0(dst, src, fcc_offset);
948     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
949     tcg_gen_and_tl(dst, dst, cpu_tmp0);
952 // 0: !(FCC0 | FCC1)
/* FBe — fcc == 0: dst = !(FCC0 | FCC1). */
953 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
954                                    unsigned int fcc_offset)
956     gen_mov_reg_FCC0(dst, src, fcc_offset);
957     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
958     tcg_gen_or_tl(dst, dst, cpu_tmp0);
959     tcg_gen_xori_tl(dst, dst, 0x1);
962 // 0 or 3: !(FCC0 ^ FCC1)
/* FBue — fcc is 0 or 3: dst = !(FCC0 ^ FCC1). */
963 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
964                                     unsigned int fcc_offset)
966     gen_mov_reg_FCC0(dst, src, fcc_offset);
967     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
968     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
969     tcg_gen_xori_tl(dst, dst, 0x1);
972 // 0 or 2: !FCC0
/* FBge — fcc is 0 or 2: dst = !FCC0. */
973 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
974                                     unsigned int fcc_offset)
976     gen_mov_reg_FCC0(dst, src, fcc_offset);
977     tcg_gen_xori_tl(dst, dst, 0x1);
980 // !1: !(FCC0 & !FCC1)
/* FBuge — fcc != 1: dst = !(FCC0 & !FCC1). */
981 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
982                                      unsigned int fcc_offset)
984     gen_mov_reg_FCC0(dst, src, fcc_offset);
985     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
986     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
987     tcg_gen_and_tl(dst, dst, cpu_tmp0);
988     tcg_gen_xori_tl(dst, dst, 0x1);
991 // 0 or 1: !FCC1
/* FBle — fcc is 0 or 1: dst = !FCC1. */
992 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
993                                     unsigned int fcc_offset)
995     gen_mov_reg_FCC1(dst, src, fcc_offset);
996     tcg_gen_xori_tl(dst, dst, 0x1);
999 // !2: !(!FCC0 & FCC1)
/* FBule — fcc != 2: dst = !(!FCC0 & FCC1). */
1000 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1001                                      unsigned int fcc_offset)
1003     gen_mov_reg_FCC0(dst, src, fcc_offset);
1004     tcg_gen_xori_tl(dst, dst, 0x1);
1005     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1006     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1007     tcg_gen_xori_tl(dst, dst, 0x1);
1010 // !3: !(FCC0 & FCC1)
/* FBo — ordered, fcc != 3: dst = !(FCC0 & FCC1). */
1011 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1012                                    unsigned int fcc_offset)
1014     gen_mov_reg_FCC0(dst, src, fcc_offset);
1015     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1016     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017     tcg_gen_xori_tl(dst, dst, 0x1);
/* Two-way TB exit on r_cond: taken path goes to pc1 (delay slot pc1+4),
   fall-through path to pc2 (pc2+4). */
1020 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1021                                target_ulong pc2, TCGv r_cond)
1023     int l1;
1025     l1 = gen_new_label();
1027     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1029     gen_goto_tb(dc, 0, pc1, pc1 + 4);
1031     gen_set_label(l1);
1032     gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Annulled conditional branch: if r_cond is true, execute the delay
   slot at pc2 then jump to target pc1; otherwise skip (annul) the
   delay slot and continue at pc2+4. */
1035 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1036                                 target_ulong pc2, TCGv r_cond)
1038     int l1;
1040     l1 = gen_new_label();
1042     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1044     gen_goto_tb(dc, 0, pc2, pc1);
1046     gen_set_label(l1);
1047     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a pending JUMP_PC: set cpu_npc to npc1 if r_cond is
   nonzero, else to npc2 (no TB exit — just resolves npc). */
1050 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1051                                       TCGv r_cond)
1053     int l1, l2;
1055     l1 = gen_new_label();
1056     l2 = gen_new_label();
1058     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1060     tcg_gen_movi_tl(cpu_npc, npc1);
1061     tcg_gen_br(l2);
1063     gen_set_label(l1);
1064     tcg_gen_movi_tl(cpu_npc, npc2);
1065     gen_set_label(l2);
1068 /* call this function before using the condition register as it may
1069 have been set for a jump */
/* If npc is in the deferred JUMP_PC state, resolve it into cpu_npc so
   the condition register can be reused; npc becomes DYNAMIC_PC. */
1070 static inline void flush_cond(DisasContext *dc, TCGv cond)
1072     if (dc->npc == JUMP_PC) {
1073         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1074         dc->npc = DYNAMIC_PC;
/* Make cpu_npc reflect dc->npc: resolve a pending JUMP_PC, or store a
   known-static npc; a DYNAMIC_PC npc is already up to date. */
1078 static inline void save_npc(DisasContext *dc, TCGv cond)
1080     if (dc->npc == JUMP_PC) {
1081         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1082         dc->npc = DYNAMIC_PC;
1083     } else if (dc->npc != DYNAMIC_PC) {
1084         tcg_gen_movi_tl(cpu_npc, dc->npc);
/* Synchronize full CPU state (pc, evaluated flags, npc) before an
   operation that may raise an exception or inspect env. */
1088 static inline void save_state(DisasContext *dc, TCGv cond)
1090     tcg_gen_movi_tl(cpu_pc, dc->pc);
1091     /* flush pending conditional evaluations before exposing cpu state */
1092     if (dc->cc_op != CC_OP_FLAGS) {
1093         dc->cc_op = CC_OP_FLAGS;
1094         gen_helper_compute_psr();
1096     save_npc(dc, cond);
/* Advance pc to the (possibly dynamic) npc, resolving a pending
   JUMP_PC first; used when the current insn transfers control. */
1099 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1101     if (dc->npc == JUMP_PC) {
1102         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1103         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1104         dc->pc = DYNAMIC_PC;
1105     } else if (dc->npc == DYNAMIC_PC) {
1106         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1107         dc->pc = DYNAMIC_PC;
1108     } else {
1109         dc->pc = dc->npc;
/* Sequential advance: pc <- npc, npc <- npc + 4. */
1113 static inline void gen_op_next_insn(void)
1115     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1116     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate integer condition code `cond` (Bicc/BPcc encoding 0x0-0xf)
   into r_dst as 0/1.  `cc` selects xcc vs icc on sparc64.  Flags are
   forced to CC_OP_FLAGS (via compute_psr) first, since the evaluators
   read the materialized PSR/XCC bits. */
1119 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1120                             DisasContext *dc)
1122     TCGv_i32 r_src;
1124 #ifdef TARGET_SPARC64
1125     if (cc)
1126         r_src = cpu_xcc;
1127     else
1128         r_src = cpu_psr;
1129 #else
1130     r_src = cpu_psr;
1131 #endif
1132     switch (dc->cc_op) {
1133     case CC_OP_FLAGS:
1134         break;
1135     default:
1136         gen_helper_compute_psr();
1137         dc->cc_op = CC_OP_FLAGS;
1138         break;
1140     switch (cond) {
1141     case 0x0:
1142         gen_op_eval_bn(r_dst);
1143         break;
1144     case 0x1:
1145         gen_op_eval_be(r_dst, r_src);
1146         break;
1147     case 0x2:
1148         gen_op_eval_ble(r_dst, r_src);
1149         break;
1150     case 0x3:
1151         gen_op_eval_bl(r_dst, r_src);
1152         break;
1153     case 0x4:
1154         gen_op_eval_bleu(r_dst, r_src);
1155         break;
1156     case 0x5:
1157         gen_op_eval_bcs(r_dst, r_src);
1158         break;
1159     case 0x6:
1160         gen_op_eval_bneg(r_dst, r_src);
1161         break;
1162     case 0x7:
1163         gen_op_eval_bvs(r_dst, r_src);
1164         break;
1165     case 0x8:
1166         gen_op_eval_ba(r_dst);
1167         break;
1168     case 0x9:
1169         gen_op_eval_bne(r_dst, r_src);
1170         break;
1171     case 0xa:
1172         gen_op_eval_bg(r_dst, r_src);
1173         break;
1174     case 0xb:
1175         gen_op_eval_bge(r_dst, r_src);
1176         break;
1177     case 0xc:
1178         gen_op_eval_bgu(r_dst, r_src);
1179         break;
1180     case 0xd:
1181         gen_op_eval_bcc(r_dst, r_src);
1182         break;
1183     case 0xe:
1184         gen_op_eval_bpos(r_dst, r_src);
1185         break;
1186     case 0xf:
1187         gen_op_eval_bvc(r_dst, r_src);
1188         break;
/* Evaluate floating-point condition `cond` (FBfcc encoding 0x0-0xf) on
   FSR field fcc<cc> into r_dst as 0/1.  The offset maps cc to the bit
   position of that fcc field within the FSR (fcc1..fcc3 live at
   bits 32+ on sparc64; fcc0 at the architected FSR_FCC0 position). */
1192 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1194     unsigned int offset;
1196     switch (cc) {
1197     default:
1198     case 0x0:
1199         offset = 0;
1200         break;
1201     case 0x1:
1202         offset = 32 - 10;
1203         break;
1204     case 0x2:
1205         offset = 34 - 10;
1206         break;
1207     case 0x3:
1208         offset = 36 - 10;
1209         break;
1212     switch (cond) {
1213     case 0x0:
1214         gen_op_eval_bn(r_dst);
1215         break;
1216     case 0x1:
1217         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1218         break;
1219     case 0x2:
1220         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1221         break;
1222     case 0x3:
1223         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1224         break;
1225     case 0x4:
1226         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1227         break;
1228     case 0x5:
1229         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1230         break;
1231     case 0x6:
1232         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1233         break;
1234     case 0x7:
1235         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1236         break;
1237     case 0x8:
1238         gen_op_eval_ba(r_dst);
1239         break;
1240     case 0x9:
1241         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1242         break;
1243     case 0xa:
1244         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1245         break;
1246     case 0xb:
1247         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1248         break;
1249     case 0xc:
1250         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1251         break;
1252     case 0xd:
1253         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1254         break;
1255     case 0xe:
1256         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1257         break;
1258     case 0xf:
1259         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1260         break;
1264 #ifdef TARGET_SPARC64
1265 // Inverted logic
1266 static const int gen_tcg_cond_reg[8] = {
1268 TCG_COND_NE,
1269 TCG_COND_GT,
1270 TCG_COND_GE,
1272 TCG_COND_EQ,
1273 TCG_COND_LE,
1274 TCG_COND_LT,
1277 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1279 int l1;
1281 l1 = gen_new_label();
1282 tcg_gen_movi_tl(r_dst, 0);
1283 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1284 tcg_gen_movi_tl(r_dst, 1);
1285 gen_set_label(l1);
1287 #endif
1289 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1290 TCGv r_cond)
1292 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1293 target_ulong target = dc->pc + offset;
1295 if (cond == 0x0) {
1296 /* unconditional not taken */
1297 if (a) {
1298 dc->pc = dc->npc + 4;
1299 dc->npc = dc->pc + 4;
1300 } else {
1301 dc->pc = dc->npc;
1302 dc->npc = dc->pc + 4;
1304 } else if (cond == 0x8) {
1305 /* unconditional taken */
1306 if (a) {
1307 dc->pc = target;
1308 dc->npc = dc->pc + 4;
1309 } else {
1310 dc->pc = dc->npc;
1311 dc->npc = target;
1312 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1314 } else {
1315 flush_cond(dc, r_cond);
1316 gen_cond(r_cond, cc, cond, dc);
1317 if (a) {
1318 gen_branch_a(dc, target, dc->npc, r_cond);
1319 dc->is_br = 1;
1320 } else {
1321 dc->pc = dc->npc;
1322 dc->jump_pc[0] = target;
1323 if (unlikely(dc->npc == DYNAMIC_PC)) {
1324 dc->jump_pc[1] = DYNAMIC_PC;
1325 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1326 } else {
1327 dc->jump_pc[1] = dc->npc + 4;
1328 dc->npc = JUMP_PC;
1334 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1335 TCGv r_cond)
1337 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1338 target_ulong target = dc->pc + offset;
1340 if (cond == 0x0) {
1341 /* unconditional not taken */
1342 if (a) {
1343 dc->pc = dc->npc + 4;
1344 dc->npc = dc->pc + 4;
1345 } else {
1346 dc->pc = dc->npc;
1347 dc->npc = dc->pc + 4;
1349 } else if (cond == 0x8) {
1350 /* unconditional taken */
1351 if (a) {
1352 dc->pc = target;
1353 dc->npc = dc->pc + 4;
1354 } else {
1355 dc->pc = dc->npc;
1356 dc->npc = target;
1357 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1359 } else {
1360 flush_cond(dc, r_cond);
1361 gen_fcond(r_cond, cc, cond);
1362 if (a) {
1363 gen_branch_a(dc, target, dc->npc, r_cond);
1364 dc->is_br = 1;
1365 } else {
1366 dc->pc = dc->npc;
1367 dc->jump_pc[0] = target;
1368 if (unlikely(dc->npc == DYNAMIC_PC)) {
1369 dc->jump_pc[1] = DYNAMIC_PC;
1370 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1371 } else {
1372 dc->jump_pc[1] = dc->npc + 4;
1373 dc->npc = JUMP_PC;
1379 #ifdef TARGET_SPARC64
1380 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1381 TCGv r_cond, TCGv r_reg)
1383 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1384 target_ulong target = dc->pc + offset;
1386 flush_cond(dc, r_cond);
1387 gen_cond_reg(r_cond, cond, r_reg);
1388 if (a) {
1389 gen_branch_a(dc, target, dc->npc, r_cond);
1390 dc->is_br = 1;
1391 } else {
1392 dc->pc = dc->npc;
1393 dc->jump_pc[0] = target;
1394 if (unlikely(dc->npc == DYNAMIC_PC)) {
1395 dc->jump_pc[1] = DYNAMIC_PC;
1396 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1397 } else {
1398 dc->jump_pc[1] = dc->npc + 4;
1399 dc->npc = JUMP_PC;
1404 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1406 switch (fccno) {
1407 case 0:
1408 gen_helper_fcmps(r_rs1, r_rs2);
1409 break;
1410 case 1:
1411 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1412 break;
1413 case 2:
1414 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1415 break;
1416 case 3:
1417 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1418 break;
/* FCMPd: operands are staged in DT0/DT1 by the caller.  */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmpd();      break;
    case 1: gen_helper_fcmpd_fcc1(); break;
    case 2: gen_helper_fcmpd_fcc2(); break;
    case 3: gen_helper_fcmpd_fcc3(); break;
    }
}
/* FCMPq: operands are staged in QT0/QT1 by the caller.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmpq();      break;
    case 1: gen_helper_fcmpq_fcc1(); break;
    case 2: gen_helper_fcmpq_fcc2(); break;
    case 3: gen_helper_fcmpq_fcc3(); break;
    }
}
1458 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1460 switch (fccno) {
1461 case 0:
1462 gen_helper_fcmpes(r_rs1, r_rs2);
1463 break;
1464 case 1:
1465 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1466 break;
1467 case 2:
1468 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1469 break;
1470 case 3:
1471 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1472 break;
/* FCMPEd: signalling double-precision compare, per fcc field.  */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmped();      break;
    case 1: gen_helper_fcmped_fcc1(); break;
    case 2: gen_helper_fcmped_fcc2(); break;
    case 3: gen_helper_fcmped_fcc3(); break;
    }
}
/* FCMPEq: signalling quad-precision compare, per fcc field.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0: gen_helper_fcmpeq();      break;
    case 1: gen_helper_fcmpeq_fcc1(); break;
    case 2: gen_helper_fcmpeq_fcc2(); break;
    case 3: gen_helper_fcmpeq_fcc3(); break;
    }
}
1512 #else
1514 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1516 gen_helper_fcmps(r_rs1, r_rs2);
/* Pre-V9 FCMPd: fccno is ignored, only fcc0 exists.  */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* Pre-V9 FCMPq: fccno is ignored, only fcc0 exists.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1529 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1531 gen_helper_fcmpes(r_rs1, r_rs2);
/* Pre-V9 signalling FCMPEd: fccno is ignored.  */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* Pre-V9 signalling FCMPEq: fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1543 #endif
1545 static inline void gen_op_fpexception_im(int fsr_flags)
1547 TCGv_i32 r_const;
1549 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1550 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1551 r_const = tcg_const_i32(TT_FP_EXCP);
1552 gen_helper_raise_exception(r_const);
1553 tcg_temp_free_i32(r_const);
1556 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1558 #if !defined(CONFIG_USER_ONLY)
1559 if (!dc->fpu_enabled) {
1560 TCGv_i32 r_const;
1562 save_state(dc, r_cond);
1563 r_const = tcg_const_i32(TT_NFPU_INSN);
1564 gen_helper_raise_exception(r_const);
1565 tcg_temp_free_i32(r_const);
1566 dc->is_br = 1;
1567 return 1;
1569 #endif
1570 return 0;
/* Mark the half of the FP register file containing rd as dirty in
   FPRS (bit 0 for f0-f31, bit 1 for f32-f63).  No-op on pre-V9.  */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    int dirty_bit = (rd < 32) ? 1 : 2;
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, dirty_bit);
#endif
}
1580 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1582 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset accumulated softfloat exception flags before an FP op.  */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1590 /* asi moves */
1591 #ifdef TARGET_SPARC64
1592 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1594 int asi;
1595 TCGv_i32 r_asi;
1597 if (IS_IMM) {
1598 r_asi = tcg_temp_new_i32();
1599 tcg_gen_mov_i32(r_asi, cpu_asi);
1600 } else {
1601 asi = GET_FIELD(insn, 19, 26);
1602 r_asi = tcg_const_i32(asi);
1604 return r_asi;
1607 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1608 int sign)
1610 TCGv_i32 r_asi, r_size, r_sign;
1612 r_asi = gen_get_asi(insn, addr);
1613 r_size = tcg_const_i32(size);
1614 r_sign = tcg_const_i32(sign);
1615 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1616 tcg_temp_free_i32(r_sign);
1617 tcg_temp_free_i32(r_size);
1618 tcg_temp_free_i32(r_asi);
1621 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1623 TCGv_i32 r_asi, r_size;
1625 r_asi = gen_get_asi(insn, addr);
1626 r_size = tcg_const_i32(size);
1627 gen_helper_st_asi(addr, src, r_asi, r_size);
1628 tcg_temp_free_i32(r_size);
1629 tcg_temp_free_i32(r_asi);
1632 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1634 TCGv_i32 r_asi, r_size, r_rd;
1636 r_asi = gen_get_asi(insn, addr);
1637 r_size = tcg_const_i32(size);
1638 r_rd = tcg_const_i32(rd);
1639 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1640 tcg_temp_free_i32(r_rd);
1641 tcg_temp_free_i32(r_size);
1642 tcg_temp_free_i32(r_asi);
1645 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1647 TCGv_i32 r_asi, r_size, r_rd;
1649 r_asi = gen_get_asi(insn, addr);
1650 r_size = tcg_const_i32(size);
1651 r_rd = tcg_const_i32(rd);
1652 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1653 tcg_temp_free_i32(r_rd);
1654 tcg_temp_free_i32(r_size);
1655 tcg_temp_free_i32(r_asi);
1658 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1660 TCGv_i32 r_asi, r_size, r_sign;
1662 r_asi = gen_get_asi(insn, addr);
1663 r_size = tcg_const_i32(4);
1664 r_sign = tcg_const_i32(0);
1665 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1666 tcg_temp_free_i32(r_sign);
1667 gen_helper_st_asi(addr, dst, r_asi, r_size);
1668 tcg_temp_free_i32(r_size);
1669 tcg_temp_free_i32(r_asi);
1670 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1673 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1675 TCGv_i32 r_asi, r_rd;
1677 r_asi = gen_get_asi(insn, addr);
1678 r_rd = tcg_const_i32(rd);
1679 gen_helper_ldda_asi(addr, r_asi, r_rd);
1680 tcg_temp_free_i32(r_rd);
1681 tcg_temp_free_i32(r_asi);
1684 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1686 TCGv_i32 r_asi, r_size;
1688 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1689 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1690 r_asi = gen_get_asi(insn, addr);
1691 r_size = tcg_const_i32(8);
1692 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1693 tcg_temp_free_i32(r_size);
1694 tcg_temp_free_i32(r_asi);
1697 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1698 int rd)
1700 TCGv r_val1;
1701 TCGv_i32 r_asi;
1703 r_val1 = tcg_temp_new();
1704 gen_movl_reg_TN(rd, r_val1);
1705 r_asi = gen_get_asi(insn, addr);
1706 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1707 tcg_temp_free_i32(r_asi);
1708 tcg_temp_free(r_val1);
1711 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1712 int rd)
1714 TCGv_i32 r_asi;
1716 gen_movl_reg_TN(rd, cpu_tmp64);
1717 r_asi = gen_get_asi(insn, addr);
1718 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1719 tcg_temp_free_i32(r_asi);
1722 #elif !defined(CONFIG_USER_ONLY)
1724 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1725 int sign)
1727 TCGv_i32 r_asi, r_size, r_sign;
1729 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1730 r_size = tcg_const_i32(size);
1731 r_sign = tcg_const_i32(sign);
1732 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1733 tcg_temp_free(r_sign);
1734 tcg_temp_free(r_size);
1735 tcg_temp_free(r_asi);
1736 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1739 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1741 TCGv_i32 r_asi, r_size;
1743 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1744 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1745 r_size = tcg_const_i32(size);
1746 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1747 tcg_temp_free(r_size);
1748 tcg_temp_free(r_asi);
1751 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1753 TCGv_i32 r_asi, r_size, r_sign;
1754 TCGv_i64 r_val;
1756 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1757 r_size = tcg_const_i32(4);
1758 r_sign = tcg_const_i32(0);
1759 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1760 tcg_temp_free(r_sign);
1761 r_val = tcg_temp_new_i64();
1762 tcg_gen_extu_tl_i64(r_val, dst);
1763 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1764 tcg_temp_free_i64(r_val);
1765 tcg_temp_free(r_size);
1766 tcg_temp_free(r_asi);
1767 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1770 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1772 TCGv_i32 r_asi, r_size, r_sign;
1774 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1775 r_size = tcg_const_i32(8);
1776 r_sign = tcg_const_i32(0);
1777 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1778 tcg_temp_free(r_sign);
1779 tcg_temp_free(r_size);
1780 tcg_temp_free(r_asi);
1781 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1782 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1783 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1784 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1785 gen_movl_TN_reg(rd, hi);
1788 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1790 TCGv_i32 r_asi, r_size;
1792 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1793 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1794 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1795 r_size = tcg_const_i32(8);
1796 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1797 tcg_temp_free(r_size);
1798 tcg_temp_free(r_asi);
1800 #endif
1802 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1803 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1805 TCGv_i64 r_val;
1806 TCGv_i32 r_asi, r_size;
1808 gen_ld_asi(dst, addr, insn, 1, 0);
1810 r_val = tcg_const_i64(0xffULL);
1811 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1812 r_size = tcg_const_i32(1);
1813 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1814 tcg_temp_free_i32(r_size);
1815 tcg_temp_free_i32(r_asi);
1816 tcg_temp_free_i64(r_val);
1818 #endif
1820 static inline TCGv get_src1(unsigned int insn, TCGv def)
1822 TCGv r_rs1 = def;
1823 unsigned int rs1;
1825 rs1 = GET_FIELD(insn, 13, 17);
1826 if (rs1 == 0) {
1827 tcg_gen_movi_tl(def, 0);
1828 } else if (rs1 < 8) {
1829 r_rs1 = cpu_gregs[rs1];
1830 } else {
1831 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1833 return r_rs1;
1836 static inline TCGv get_src2(unsigned int insn, TCGv def)
1838 TCGv r_rs2 = def;
1840 if (IS_IMM) { /* immediate */
1841 target_long simm = GET_FIELDs(insn, 19, 31);
1842 tcg_gen_movi_tl(def, simm);
1843 } else { /* register */
1844 unsigned int rs2 = GET_FIELD(insn, 27, 31);
1845 if (rs2 == 0) {
1846 tcg_gen_movi_tl(def, 0);
1847 } else if (rs2 < 8) {
1848 r_rs2 = cpu_gregs[rs2];
1849 } else {
1850 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1853 return r_rs2;
1856 #ifdef TARGET_SPARC64
1857 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1859 TCGv_i32 r_tl = tcg_temp_new_i32();
1861 /* load env->tl into r_tl */
1862 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1864 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1865 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1867 /* calculate offset to current trap state from env->ts, reuse r_tl */
1868 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1869 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1871 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1873 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1874 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1875 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1876 tcg_temp_free_ptr(r_tl_tmp);
1879 tcg_temp_free_i32(r_tl);
1881 #endif
/* Jump to illegal_insn when the CPU model lacks an IU feature.
   Wrapped in do { } while (0) so the macro is a single statement and
   stays safe inside unbraced if/else chains.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                                 \
    do {                                                              \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))         \
            goto illegal_insn;                                        \
    } while (0)
/* Jump to nfpu_insn when the CPU model lacks an FPU feature.
   do { } while (0) keeps the macro a single statement.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                                \
    do {                                                              \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))         \
            goto nfpu_insn;                                           \
    } while (0)
1890 /* before an instruction, dc->pc must be static */
1891 static void disas_sparc_insn(DisasContext * dc)
1893 unsigned int insn, opc, rs1, rs2, rd;
1894 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1895 target_long simm;
1897 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1898 tcg_gen_debug_insn_start(dc->pc);
1899 insn = ldl_code(dc->pc);
1900 opc = GET_FIELD(insn, 0, 1);
1902 rd = GET_FIELD(insn, 2, 6);
1904 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1905 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1907 switch (opc) {
1908 case 0: /* branches/sethi */
1910 unsigned int xop = GET_FIELD(insn, 7, 9);
1911 int32_t target;
1912 switch (xop) {
1913 #ifdef TARGET_SPARC64
1914 case 0x1: /* V9 BPcc */
1916 int cc;
1918 target = GET_FIELD_SP(insn, 0, 18);
1919 target = sign_extend(target, 19);
1920 target <<= 2;
1921 cc = GET_FIELD_SP(insn, 20, 21);
1922 if (cc == 0)
1923 do_branch(dc, target, insn, 0, cpu_cond);
1924 else if (cc == 2)
1925 do_branch(dc, target, insn, 1, cpu_cond);
1926 else
1927 goto illegal_insn;
1928 goto jmp_insn;
1930 case 0x3: /* V9 BPr */
1932 target = GET_FIELD_SP(insn, 0, 13) |
1933 (GET_FIELD_SP(insn, 20, 21) << 14);
1934 target = sign_extend(target, 16);
1935 target <<= 2;
1936 cpu_src1 = get_src1(insn, cpu_src1);
1937 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1938 goto jmp_insn;
1940 case 0x5: /* V9 FBPcc */
1942 int cc = GET_FIELD_SP(insn, 20, 21);
1943 if (gen_trap_ifnofpu(dc, cpu_cond))
1944 goto jmp_insn;
1945 target = GET_FIELD_SP(insn, 0, 18);
1946 target = sign_extend(target, 19);
1947 target <<= 2;
1948 do_fbranch(dc, target, insn, cc, cpu_cond);
1949 goto jmp_insn;
1951 #else
1952 case 0x7: /* CBN+x */
1954 goto ncp_insn;
1956 #endif
1957 case 0x2: /* BN+x */
1959 target = GET_FIELD(insn, 10, 31);
1960 target = sign_extend(target, 22);
1961 target <<= 2;
1962 do_branch(dc, target, insn, 0, cpu_cond);
1963 goto jmp_insn;
1965 case 0x6: /* FBN+x */
1967 if (gen_trap_ifnofpu(dc, cpu_cond))
1968 goto jmp_insn;
1969 target = GET_FIELD(insn, 10, 31);
1970 target = sign_extend(target, 22);
1971 target <<= 2;
1972 do_fbranch(dc, target, insn, 0, cpu_cond);
1973 goto jmp_insn;
1975 case 0x4: /* SETHI */
1976 if (rd) { // nop
1977 uint32_t value = GET_FIELD(insn, 10, 31);
1978 TCGv r_const;
1980 r_const = tcg_const_tl(value << 10);
1981 gen_movl_TN_reg(rd, r_const);
1982 tcg_temp_free(r_const);
1984 break;
1985 case 0x0: /* UNIMPL */
1986 default:
1987 goto illegal_insn;
1989 break;
1991 break;
1992 case 1: /*CALL*/
1994 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1995 TCGv r_const;
1997 r_const = tcg_const_tl(dc->pc);
1998 gen_movl_TN_reg(15, r_const);
1999 tcg_temp_free(r_const);
2000 target += dc->pc;
2001 gen_mov_pc_npc(dc, cpu_cond);
2002 dc->npc = target;
2004 goto jmp_insn;
2005 case 2: /* FPU & Logical Operations */
2007 unsigned int xop = GET_FIELD(insn, 7, 12);
2008 if (xop == 0x3a) { /* generate trap */
2009 int cond;
2011 cpu_src1 = get_src1(insn, cpu_src1);
2012 if (IS_IMM) {
2013 rs2 = GET_FIELD(insn, 25, 31);
2014 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2015 } else {
2016 rs2 = GET_FIELD(insn, 27, 31);
2017 if (rs2 != 0) {
2018 gen_movl_reg_TN(rs2, cpu_src2);
2019 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2020 } else
2021 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2024 cond = GET_FIELD(insn, 3, 6);
2025 if (cond == 0x8) { /* Trap Always */
2026 save_state(dc, cpu_cond);
2027 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2028 supervisor(dc))
2029 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2030 else
2031 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2032 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2033 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2035 if (rs2 == 0 &&
2036 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2038 gen_helper_shutdown();
2040 } else {
2041 gen_helper_raise_exception(cpu_tmp32);
2043 } else if (cond != 0) {
2044 TCGv r_cond = tcg_temp_new();
2045 int l1;
2046 #ifdef TARGET_SPARC64
2047 /* V9 icc/xcc */
2048 int cc = GET_FIELD_SP(insn, 11, 12);
2050 save_state(dc, cpu_cond);
2051 if (cc == 0)
2052 gen_cond(r_cond, 0, cond, dc);
2053 else if (cc == 2)
2054 gen_cond(r_cond, 1, cond, dc);
2055 else
2056 goto illegal_insn;
2057 #else
2058 save_state(dc, cpu_cond);
2059 gen_cond(r_cond, 0, cond, dc);
2060 #endif
2061 l1 = gen_new_label();
2062 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2064 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2065 supervisor(dc))
2066 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2067 else
2068 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2069 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2070 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2071 gen_helper_raise_exception(cpu_tmp32);
2073 gen_set_label(l1);
2074 tcg_temp_free(r_cond);
2076 gen_op_next_insn();
2077 tcg_gen_exit_tb(0);
2078 dc->is_br = 1;
2079 goto jmp_insn;
2080 } else if (xop == 0x28) {
2081 rs1 = GET_FIELD(insn, 13, 17);
2082 switch(rs1) {
2083 case 0: /* rdy */
2084 #ifndef TARGET_SPARC64
2085 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2086 manual, rdy on the microSPARC
2087 II */
2088 case 0x0f: /* stbar in the SPARCv8 manual,
2089 rdy on the microSPARC II */
2090 case 0x10 ... 0x1f: /* implementation-dependent in the
2091 SPARCv8 manual, rdy on the
2092 microSPARC II */
2093 /* Read Asr17 */
2094 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2095 TCGv r_const;
2097 /* Read Asr17 for a Leon3 monoprocessor */
2098 r_const = tcg_const_tl((1 << 8)
2099 | (dc->def->nwindows - 1));
2100 gen_movl_TN_reg(rd, r_const);
2101 tcg_temp_free(r_const);
2102 break;
2104 #endif
2105 gen_movl_TN_reg(rd, cpu_y);
2106 break;
2107 #ifdef TARGET_SPARC64
2108 case 0x2: /* V9 rdccr */
2109 gen_helper_compute_psr();
2110 gen_helper_rdccr(cpu_dst);
2111 gen_movl_TN_reg(rd, cpu_dst);
2112 break;
2113 case 0x3: /* V9 rdasi */
2114 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2115 gen_movl_TN_reg(rd, cpu_dst);
2116 break;
2117 case 0x4: /* V9 rdtick */
2119 TCGv_ptr r_tickptr;
2121 r_tickptr = tcg_temp_new_ptr();
2122 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2123 offsetof(CPUState, tick));
2124 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2125 tcg_temp_free_ptr(r_tickptr);
2126 gen_movl_TN_reg(rd, cpu_dst);
2128 break;
2129 case 0x5: /* V9 rdpc */
2131 TCGv r_const;
2133 r_const = tcg_const_tl(dc->pc);
2134 gen_movl_TN_reg(rd, r_const);
2135 tcg_temp_free(r_const);
2137 break;
2138 case 0x6: /* V9 rdfprs */
2139 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2140 gen_movl_TN_reg(rd, cpu_dst);
2141 break;
2142 case 0xf: /* V9 membar */
2143 break; /* no effect */
2144 case 0x13: /* Graphics Status */
2145 if (gen_trap_ifnofpu(dc, cpu_cond))
2146 goto jmp_insn;
2147 gen_movl_TN_reg(rd, cpu_gsr);
2148 break;
2149 case 0x16: /* Softint */
2150 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2151 gen_movl_TN_reg(rd, cpu_dst);
2152 break;
2153 case 0x17: /* Tick compare */
2154 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2155 break;
2156 case 0x18: /* System tick */
2158 TCGv_ptr r_tickptr;
2160 r_tickptr = tcg_temp_new_ptr();
2161 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2162 offsetof(CPUState, stick));
2163 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2164 tcg_temp_free_ptr(r_tickptr);
2165 gen_movl_TN_reg(rd, cpu_dst);
2167 break;
2168 case 0x19: /* System tick compare */
2169 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2170 break;
2171 case 0x10: /* Performance Control */
2172 case 0x11: /* Performance Instrumentation Counter */
2173 case 0x12: /* Dispatch Control */
2174 case 0x14: /* Softint set, WO */
2175 case 0x15: /* Softint clear, WO */
2176 #endif
2177 default:
2178 goto illegal_insn;
2180 #if !defined(CONFIG_USER_ONLY)
2181 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2182 #ifndef TARGET_SPARC64
2183 if (!supervisor(dc))
2184 goto priv_insn;
2185 gen_helper_compute_psr();
2186 dc->cc_op = CC_OP_FLAGS;
2187 gen_helper_rdpsr(cpu_dst);
2188 #else
2189 CHECK_IU_FEATURE(dc, HYPV);
2190 if (!hypervisor(dc))
2191 goto priv_insn;
2192 rs1 = GET_FIELD(insn, 13, 17);
2193 switch (rs1) {
2194 case 0: // hpstate
2195 // gen_op_rdhpstate();
2196 break;
2197 case 1: // htstate
2198 // gen_op_rdhtstate();
2199 break;
2200 case 3: // hintp
2201 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2202 break;
2203 case 5: // htba
2204 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2205 break;
2206 case 6: // hver
2207 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2208 break;
2209 case 31: // hstick_cmpr
2210 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2211 break;
2212 default:
2213 goto illegal_insn;
2215 #endif
2216 gen_movl_TN_reg(rd, cpu_dst);
2217 break;
2218 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2219 if (!supervisor(dc))
2220 goto priv_insn;
2221 #ifdef TARGET_SPARC64
2222 rs1 = GET_FIELD(insn, 13, 17);
2223 switch (rs1) {
2224 case 0: // tpc
2226 TCGv_ptr r_tsptr;
2228 r_tsptr = tcg_temp_new_ptr();
2229 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2230 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2231 offsetof(trap_state, tpc));
2232 tcg_temp_free_ptr(r_tsptr);
2234 break;
2235 case 1: // tnpc
2237 TCGv_ptr r_tsptr;
2239 r_tsptr = tcg_temp_new_ptr();
2240 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2241 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2242 offsetof(trap_state, tnpc));
2243 tcg_temp_free_ptr(r_tsptr);
2245 break;
2246 case 2: // tstate
2248 TCGv_ptr r_tsptr;
2250 r_tsptr = tcg_temp_new_ptr();
2251 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2252 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2253 offsetof(trap_state, tstate));
2254 tcg_temp_free_ptr(r_tsptr);
2256 break;
2257 case 3: // tt
2259 TCGv_ptr r_tsptr;
2261 r_tsptr = tcg_temp_new_ptr();
2262 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2263 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2264 offsetof(trap_state, tt));
2265 tcg_temp_free_ptr(r_tsptr);
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2268 break;
2269 case 4: // tick
2271 TCGv_ptr r_tickptr;
2273 r_tickptr = tcg_temp_new_ptr();
2274 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2275 offsetof(CPUState, tick));
2276 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2277 gen_movl_TN_reg(rd, cpu_tmp0);
2278 tcg_temp_free_ptr(r_tickptr);
2280 break;
2281 case 5: // tba
2282 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2283 break;
2284 case 6: // pstate
2285 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2286 offsetof(CPUSPARCState, pstate));
2287 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2288 break;
2289 case 7: // tl
2290 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2291 offsetof(CPUSPARCState, tl));
2292 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2293 break;
2294 case 8: // pil
2295 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2296 offsetof(CPUSPARCState, psrpil));
2297 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2298 break;
2299 case 9: // cwp
2300 gen_helper_rdcwp(cpu_tmp0);
2301 break;
2302 case 10: // cansave
2303 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2304 offsetof(CPUSPARCState, cansave));
2305 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2306 break;
2307 case 11: // canrestore
2308 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2309 offsetof(CPUSPARCState, canrestore));
2310 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2311 break;
2312 case 12: // cleanwin
2313 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2314 offsetof(CPUSPARCState, cleanwin));
2315 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2316 break;
2317 case 13: // otherwin
2318 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2319 offsetof(CPUSPARCState, otherwin));
2320 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2321 break;
2322 case 14: // wstate
2323 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2324 offsetof(CPUSPARCState, wstate));
2325 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2326 break;
2327 case 16: // UA2005 gl
2328 CHECK_IU_FEATURE(dc, GL);
2329 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2330 offsetof(CPUSPARCState, gl));
2331 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2332 break;
2333 case 26: // UA2005 strand status
2334 CHECK_IU_FEATURE(dc, HYPV);
2335 if (!hypervisor(dc))
2336 goto priv_insn;
2337 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2338 break;
2339 case 31: // ver
2340 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2341 break;
2342 case 15: // fq
2343 default:
2344 goto illegal_insn;
2346 #else
2347 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2348 #endif
2349 gen_movl_TN_reg(rd, cpu_tmp0);
2350 break;
2351 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2352 #ifdef TARGET_SPARC64
2353 save_state(dc, cpu_cond);
2354 gen_helper_flushw();
2355 #else
2356 if (!supervisor(dc))
2357 goto priv_insn;
2358 gen_movl_TN_reg(rd, cpu_tbr);
2359 #endif
2360 break;
2361 #endif
2362 } else if (xop == 0x34) { /* FPU Operations */
2363 if (gen_trap_ifnofpu(dc, cpu_cond))
2364 goto jmp_insn;
2365 gen_op_clear_ieee_excp_and_FTT();
2366 rs1 = GET_FIELD(insn, 13, 17);
2367 rs2 = GET_FIELD(insn, 27, 31);
2368 xop = GET_FIELD(insn, 18, 26);
2369 save_state(dc, cpu_cond);
2370 switch (xop) {
2371 case 0x1: /* fmovs */
2372 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2373 gen_update_fprs_dirty(rd);
2374 break;
2375 case 0x5: /* fnegs */
2376 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2377 gen_update_fprs_dirty(rd);
2378 break;
2379 case 0x9: /* fabss */
2380 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2381 gen_update_fprs_dirty(rd);
2382 break;
2383 case 0x29: /* fsqrts */
2384 CHECK_FPU_FEATURE(dc, FSQRT);
2385 gen_clear_float_exceptions();
2386 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2387 gen_helper_check_ieee_exceptions();
2388 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2389 gen_update_fprs_dirty(rd);
2390 break;
2391 case 0x2a: /* fsqrtd */
2392 CHECK_FPU_FEATURE(dc, FSQRT);
2393 gen_op_load_fpr_DT1(DFPREG(rs2));
2394 gen_clear_float_exceptions();
2395 gen_helper_fsqrtd();
2396 gen_helper_check_ieee_exceptions();
2397 gen_op_store_DT0_fpr(DFPREG(rd));
2398 gen_update_fprs_dirty(DFPREG(rd));
2399 break;
2400 case 0x2b: /* fsqrtq */
2401 CHECK_FPU_FEATURE(dc, FLOAT128);
2402 gen_op_load_fpr_QT1(QFPREG(rs2));
2403 gen_clear_float_exceptions();
2404 gen_helper_fsqrtq();
2405 gen_helper_check_ieee_exceptions();
2406 gen_op_store_QT0_fpr(QFPREG(rd));
2407 gen_update_fprs_dirty(QFPREG(rd));
2408 break;
2409 case 0x41: /* fadds */
2410 gen_clear_float_exceptions();
2411 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2412 gen_helper_check_ieee_exceptions();
2413 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2414 gen_update_fprs_dirty(rd);
2415 break;
2416 case 0x42: /* faddd */
2417 gen_op_load_fpr_DT0(DFPREG(rs1));
2418 gen_op_load_fpr_DT1(DFPREG(rs2));
2419 gen_clear_float_exceptions();
2420 gen_helper_faddd();
2421 gen_helper_check_ieee_exceptions();
2422 gen_op_store_DT0_fpr(DFPREG(rd));
2423 gen_update_fprs_dirty(DFPREG(rd));
2424 break;
2425 case 0x43: /* faddq */
2426 CHECK_FPU_FEATURE(dc, FLOAT128);
2427 gen_op_load_fpr_QT0(QFPREG(rs1));
2428 gen_op_load_fpr_QT1(QFPREG(rs2));
2429 gen_clear_float_exceptions();
2430 gen_helper_faddq();
2431 gen_helper_check_ieee_exceptions();
2432 gen_op_store_QT0_fpr(QFPREG(rd));
2433 gen_update_fprs_dirty(QFPREG(rd));
2434 break;
2435 case 0x45: /* fsubs */
2436 gen_clear_float_exceptions();
2437 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2438 gen_helper_check_ieee_exceptions();
2439 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2440 gen_update_fprs_dirty(rd);
2441 break;
2442 case 0x46: /* fsubd */
2443 gen_op_load_fpr_DT0(DFPREG(rs1));
2444 gen_op_load_fpr_DT1(DFPREG(rs2));
2445 gen_clear_float_exceptions();
2446 gen_helper_fsubd();
2447 gen_helper_check_ieee_exceptions();
2448 gen_op_store_DT0_fpr(DFPREG(rd));
2449 gen_update_fprs_dirty(DFPREG(rd));
2450 break;
2451 case 0x47: /* fsubq */
2452 CHECK_FPU_FEATURE(dc, FLOAT128);
2453 gen_op_load_fpr_QT0(QFPREG(rs1));
2454 gen_op_load_fpr_QT1(QFPREG(rs2));
2455 gen_clear_float_exceptions();
2456 gen_helper_fsubq();
2457 gen_helper_check_ieee_exceptions();
2458 gen_op_store_QT0_fpr(QFPREG(rd));
2459 gen_update_fprs_dirty(QFPREG(rd));
2460 break;
2461 case 0x49: /* fmuls */
2462 CHECK_FPU_FEATURE(dc, FMUL);
2463 gen_clear_float_exceptions();
2464 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2465 gen_helper_check_ieee_exceptions();
2466 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2467 gen_update_fprs_dirty(rd);
2468 break;
2469 case 0x4a: /* fmuld */
2470 CHECK_FPU_FEATURE(dc, FMUL);
2471 gen_op_load_fpr_DT0(DFPREG(rs1));
2472 gen_op_load_fpr_DT1(DFPREG(rs2));
2473 gen_clear_float_exceptions();
2474 gen_helper_fmuld();
2475 gen_helper_check_ieee_exceptions();
2476 gen_op_store_DT0_fpr(DFPREG(rd));
2477 gen_update_fprs_dirty(DFPREG(rd));
2478 break;
2479 case 0x4b: /* fmulq */
2480 CHECK_FPU_FEATURE(dc, FLOAT128);
2481 CHECK_FPU_FEATURE(dc, FMUL);
2482 gen_op_load_fpr_QT0(QFPREG(rs1));
2483 gen_op_load_fpr_QT1(QFPREG(rs2));
2484 gen_clear_float_exceptions();
2485 gen_helper_fmulq();
2486 gen_helper_check_ieee_exceptions();
2487 gen_op_store_QT0_fpr(QFPREG(rd));
2488 gen_update_fprs_dirty(QFPREG(rd));
2489 break;
2490 case 0x4d: /* fdivs */
2491 gen_clear_float_exceptions();
2492 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2493 gen_helper_check_ieee_exceptions();
2494 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2495 gen_update_fprs_dirty(rd);
2496 break;
2497 case 0x4e: /* fdivd */
2498 gen_op_load_fpr_DT0(DFPREG(rs1));
2499 gen_op_load_fpr_DT1(DFPREG(rs2));
2500 gen_clear_float_exceptions();
2501 gen_helper_fdivd();
2502 gen_helper_check_ieee_exceptions();
2503 gen_op_store_DT0_fpr(DFPREG(rd));
2504 gen_update_fprs_dirty(DFPREG(rd));
2505 break;
2506 case 0x4f: /* fdivq */
2507 CHECK_FPU_FEATURE(dc, FLOAT128);
2508 gen_op_load_fpr_QT0(QFPREG(rs1));
2509 gen_op_load_fpr_QT1(QFPREG(rs2));
2510 gen_clear_float_exceptions();
2511 gen_helper_fdivq();
2512 gen_helper_check_ieee_exceptions();
2513 gen_op_store_QT0_fpr(QFPREG(rd));
2514 gen_update_fprs_dirty(QFPREG(rd));
2515 break;
2516 case 0x69: /* fsmuld */
2517 CHECK_FPU_FEATURE(dc, FSMULD);
2518 gen_clear_float_exceptions();
2519 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2520 gen_helper_check_ieee_exceptions();
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2522 gen_update_fprs_dirty(DFPREG(rd));
2523 break;
2524 case 0x6e: /* fdmulq */
2525 CHECK_FPU_FEATURE(dc, FLOAT128);
2526 gen_op_load_fpr_DT0(DFPREG(rs1));
2527 gen_op_load_fpr_DT1(DFPREG(rs2));
2528 gen_clear_float_exceptions();
2529 gen_helper_fdmulq();
2530 gen_helper_check_ieee_exceptions();
2531 gen_op_store_QT0_fpr(QFPREG(rd));
2532 gen_update_fprs_dirty(QFPREG(rd));
2533 break;
2534 case 0xc4: /* fitos */
2535 gen_clear_float_exceptions();
2536 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2537 gen_helper_check_ieee_exceptions();
2538 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2539 gen_update_fprs_dirty(rd);
2540 break;
2541 case 0xc6: /* fdtos */
2542 gen_op_load_fpr_DT1(DFPREG(rs2));
2543 gen_clear_float_exceptions();
2544 gen_helper_fdtos(cpu_tmp32);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2547 gen_update_fprs_dirty(rd);
2548 break;
2549 case 0xc7: /* fqtos */
2550 CHECK_FPU_FEATURE(dc, FLOAT128);
2551 gen_op_load_fpr_QT1(QFPREG(rs2));
2552 gen_clear_float_exceptions();
2553 gen_helper_fqtos(cpu_tmp32);
2554 gen_helper_check_ieee_exceptions();
2555 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2556 gen_update_fprs_dirty(rd);
2557 break;
2558 case 0xc8: /* fitod */
2559 gen_helper_fitod(cpu_fpr[rs2]);
2560 gen_op_store_DT0_fpr(DFPREG(rd));
2561 gen_update_fprs_dirty(DFPREG(rd));
2562 break;
2563 case 0xc9: /* fstod */
2564 gen_helper_fstod(cpu_fpr[rs2]);
2565 gen_op_store_DT0_fpr(DFPREG(rd));
2566 gen_update_fprs_dirty(DFPREG(rd));
2567 break;
2568 case 0xcb: /* fqtod */
2569 CHECK_FPU_FEATURE(dc, FLOAT128);
2570 gen_op_load_fpr_QT1(QFPREG(rs2));
2571 gen_clear_float_exceptions();
2572 gen_helper_fqtod();
2573 gen_helper_check_ieee_exceptions();
2574 gen_op_store_DT0_fpr(DFPREG(rd));
2575 gen_update_fprs_dirty(DFPREG(rd));
2576 break;
2577 case 0xcc: /* fitoq */
2578 CHECK_FPU_FEATURE(dc, FLOAT128);
2579 gen_helper_fitoq(cpu_fpr[rs2]);
2580 gen_op_store_QT0_fpr(QFPREG(rd));
2581 gen_update_fprs_dirty(QFPREG(rd));
2582 break;
2583 case 0xcd: /* fstoq */
2584 CHECK_FPU_FEATURE(dc, FLOAT128);
2585 gen_helper_fstoq(cpu_fpr[rs2]);
2586 gen_op_store_QT0_fpr(QFPREG(rd));
2587 gen_update_fprs_dirty(QFPREG(rd));
2588 break;
2589 case 0xce: /* fdtoq */
2590 CHECK_FPU_FEATURE(dc, FLOAT128);
2591 gen_op_load_fpr_DT1(DFPREG(rs2));
2592 gen_helper_fdtoq();
2593 gen_op_store_QT0_fpr(QFPREG(rd));
2594 gen_update_fprs_dirty(QFPREG(rd));
2595 break;
2596 case 0xd1: /* fstoi */
2597 gen_clear_float_exceptions();
2598 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2599 gen_helper_check_ieee_exceptions();
2600 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2601 gen_update_fprs_dirty(rd);
2602 break;
2603 case 0xd2: /* fdtoi */
2604 gen_op_load_fpr_DT1(DFPREG(rs2));
2605 gen_clear_float_exceptions();
2606 gen_helper_fdtoi(cpu_tmp32);
2607 gen_helper_check_ieee_exceptions();
2608 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2609 gen_update_fprs_dirty(rd);
2610 break;
2611 case 0xd3: /* fqtoi */
2612 CHECK_FPU_FEATURE(dc, FLOAT128);
2613 gen_op_load_fpr_QT1(QFPREG(rs2));
2614 gen_clear_float_exceptions();
2615 gen_helper_fqtoi(cpu_tmp32);
2616 gen_helper_check_ieee_exceptions();
2617 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2618 gen_update_fprs_dirty(rd);
2619 break;
2620 #ifdef TARGET_SPARC64
2621 case 0x2: /* V9 fmovd */
2622 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2623 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2624 cpu_fpr[DFPREG(rs2) + 1]);
2625 gen_update_fprs_dirty(DFPREG(rd));
2626 break;
2627 case 0x3: /* V9 fmovq */
2628 CHECK_FPU_FEATURE(dc, FLOAT128);
2629 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2630 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2631 cpu_fpr[QFPREG(rs2) + 1]);
2632 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2633 cpu_fpr[QFPREG(rs2) + 2]);
2634 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2635 cpu_fpr[QFPREG(rs2) + 3]);
2636 gen_update_fprs_dirty(QFPREG(rd));
2637 break;
2638 case 0x6: /* V9 fnegd */
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 gen_helper_fnegd();
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2642 gen_update_fprs_dirty(DFPREG(rd));
2643 break;
2644 case 0x7: /* V9 fnegq */
2645 CHECK_FPU_FEATURE(dc, FLOAT128);
2646 gen_op_load_fpr_QT1(QFPREG(rs2));
2647 gen_helper_fnegq();
2648 gen_op_store_QT0_fpr(QFPREG(rd));
2649 gen_update_fprs_dirty(QFPREG(rd));
2650 break;
2651 case 0xa: /* V9 fabsd */
2652 gen_op_load_fpr_DT1(DFPREG(rs2));
2653 gen_helper_fabsd();
2654 gen_op_store_DT0_fpr(DFPREG(rd));
2655 gen_update_fprs_dirty(DFPREG(rd));
2656 break;
2657 case 0xb: /* V9 fabsq */
2658 CHECK_FPU_FEATURE(dc, FLOAT128);
2659 gen_op_load_fpr_QT1(QFPREG(rs2));
2660 gen_helper_fabsq();
2661 gen_op_store_QT0_fpr(QFPREG(rd));
2662 gen_update_fprs_dirty(QFPREG(rd));
2663 break;
2664 case 0x81: /* V9 fstox */
2665 gen_clear_float_exceptions();
2666 gen_helper_fstox(cpu_fpr[rs2]);
2667 gen_helper_check_ieee_exceptions();
2668 gen_op_store_DT0_fpr(DFPREG(rd));
2669 gen_update_fprs_dirty(DFPREG(rd));
2670 break;
2671 case 0x82: /* V9 fdtox */
2672 gen_op_load_fpr_DT1(DFPREG(rs2));
2673 gen_clear_float_exceptions();
2674 gen_helper_fdtox();
2675 gen_helper_check_ieee_exceptions();
2676 gen_op_store_DT0_fpr(DFPREG(rd));
2677 gen_update_fprs_dirty(DFPREG(rd));
2678 break;
2679 case 0x83: /* V9 fqtox */
2680 CHECK_FPU_FEATURE(dc, FLOAT128);
2681 gen_op_load_fpr_QT1(QFPREG(rs2));
2682 gen_clear_float_exceptions();
2683 gen_helper_fqtox();
2684 gen_helper_check_ieee_exceptions();
2685 gen_op_store_DT0_fpr(DFPREG(rd));
2686 gen_update_fprs_dirty(DFPREG(rd));
2687 break;
2688 case 0x84: /* V9 fxtos */
2689 gen_op_load_fpr_DT1(DFPREG(rs2));
2690 gen_clear_float_exceptions();
2691 gen_helper_fxtos(cpu_tmp32);
2692 gen_helper_check_ieee_exceptions();
2693 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2694 gen_update_fprs_dirty(rd);
2695 break;
2696 case 0x88: /* V9 fxtod */
2697 gen_op_load_fpr_DT1(DFPREG(rs2));
2698 gen_clear_float_exceptions();
2699 gen_helper_fxtod();
2700 gen_helper_check_ieee_exceptions();
2701 gen_op_store_DT0_fpr(DFPREG(rd));
2702 gen_update_fprs_dirty(DFPREG(rd));
2703 break;
2704 case 0x8c: /* V9 fxtoq */
2705 CHECK_FPU_FEATURE(dc, FLOAT128);
2706 gen_op_load_fpr_DT1(DFPREG(rs2));
2707 gen_clear_float_exceptions();
2708 gen_helper_fxtoq();
2709 gen_helper_check_ieee_exceptions();
2710 gen_op_store_QT0_fpr(QFPREG(rd));
2711 gen_update_fprs_dirty(QFPREG(rd));
2712 break;
2713 #endif
2714 default:
2715 goto illegal_insn;
2717 } else if (xop == 0x35) { /* FPU Operations */
2718 #ifdef TARGET_SPARC64
2719 int cond;
2720 #endif
2721 if (gen_trap_ifnofpu(dc, cpu_cond))
2722 goto jmp_insn;
2723 gen_op_clear_ieee_excp_and_FTT();
2724 rs1 = GET_FIELD(insn, 13, 17);
2725 rs2 = GET_FIELD(insn, 27, 31);
2726 xop = GET_FIELD(insn, 18, 26);
2727 save_state(dc, cpu_cond);
2728 #ifdef TARGET_SPARC64
2729 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2730 int l1;
2732 l1 = gen_new_label();
2733 cond = GET_FIELD_SP(insn, 14, 17);
2734 cpu_src1 = get_src1(insn, cpu_src1);
2735 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2736 0, l1);
2737 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2738 gen_update_fprs_dirty(rd);
2739 gen_set_label(l1);
2740 break;
2741 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2742 int l1;
2744 l1 = gen_new_label();
2745 cond = GET_FIELD_SP(insn, 14, 17);
2746 cpu_src1 = get_src1(insn, cpu_src1);
2747 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2748 0, l1);
2749 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2750 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2751 gen_update_fprs_dirty(DFPREG(rd));
2752 gen_set_label(l1);
2753 break;
2754 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2755 int l1;
2757 CHECK_FPU_FEATURE(dc, FLOAT128);
2758 l1 = gen_new_label();
2759 cond = GET_FIELD_SP(insn, 14, 17);
2760 cpu_src1 = get_src1(insn, cpu_src1);
2761 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2762 0, l1);
2763 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2764 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2765 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2766 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2767 gen_update_fprs_dirty(QFPREG(rd));
2768 gen_set_label(l1);
2769 break;
2771 #endif
2772 switch (xop) {
2773 #ifdef TARGET_SPARC64
2774 #define FMOVSCC(fcc) \
2776 TCGv r_cond; \
2777 int l1; \
2779 l1 = gen_new_label(); \
2780 r_cond = tcg_temp_new(); \
2781 cond = GET_FIELD_SP(insn, 14, 17); \
2782 gen_fcond(r_cond, fcc, cond); \
2783 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2784 0, l1); \
2785 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2786 gen_update_fprs_dirty(rd); \
2787 gen_set_label(l1); \
2788 tcg_temp_free(r_cond); \
2790 #define FMOVDCC(fcc) \
2792 TCGv r_cond; \
2793 int l1; \
2795 l1 = gen_new_label(); \
2796 r_cond = tcg_temp_new(); \
2797 cond = GET_FIELD_SP(insn, 14, 17); \
2798 gen_fcond(r_cond, fcc, cond); \
2799 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2800 0, l1); \
2801 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2802 cpu_fpr[DFPREG(rs2)]); \
2803 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2804 cpu_fpr[DFPREG(rs2) + 1]); \
2805 gen_update_fprs_dirty(DFPREG(rd)); \
2806 gen_set_label(l1); \
2807 tcg_temp_free(r_cond); \
2809 #define FMOVQCC(fcc) \
2811 TCGv r_cond; \
2812 int l1; \
2814 l1 = gen_new_label(); \
2815 r_cond = tcg_temp_new(); \
2816 cond = GET_FIELD_SP(insn, 14, 17); \
2817 gen_fcond(r_cond, fcc, cond); \
2818 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2819 0, l1); \
2820 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2821 cpu_fpr[QFPREG(rs2)]); \
2822 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2823 cpu_fpr[QFPREG(rs2) + 1]); \
2824 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2825 cpu_fpr[QFPREG(rs2) + 2]); \
2826 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2827 cpu_fpr[QFPREG(rs2) + 3]); \
2828 gen_update_fprs_dirty(QFPREG(rd)); \
2829 gen_set_label(l1); \
2830 tcg_temp_free(r_cond); \
2832 case 0x001: /* V9 fmovscc %fcc0 */
2833 FMOVSCC(0);
2834 break;
2835 case 0x002: /* V9 fmovdcc %fcc0 */
2836 FMOVDCC(0);
2837 break;
2838 case 0x003: /* V9 fmovqcc %fcc0 */
2839 CHECK_FPU_FEATURE(dc, FLOAT128);
2840 FMOVQCC(0);
2841 break;
2842 case 0x041: /* V9 fmovscc %fcc1 */
2843 FMOVSCC(1);
2844 break;
2845 case 0x042: /* V9 fmovdcc %fcc1 */
2846 FMOVDCC(1);
2847 break;
2848 case 0x043: /* V9 fmovqcc %fcc1 */
2849 CHECK_FPU_FEATURE(dc, FLOAT128);
2850 FMOVQCC(1);
2851 break;
2852 case 0x081: /* V9 fmovscc %fcc2 */
2853 FMOVSCC(2);
2854 break;
2855 case 0x082: /* V9 fmovdcc %fcc2 */
2856 FMOVDCC(2);
2857 break;
2858 case 0x083: /* V9 fmovqcc %fcc2 */
2859 CHECK_FPU_FEATURE(dc, FLOAT128);
2860 FMOVQCC(2);
2861 break;
2862 case 0x0c1: /* V9 fmovscc %fcc3 */
2863 FMOVSCC(3);
2864 break;
2865 case 0x0c2: /* V9 fmovdcc %fcc3 */
2866 FMOVDCC(3);
2867 break;
2868 case 0x0c3: /* V9 fmovqcc %fcc3 */
2869 CHECK_FPU_FEATURE(dc, FLOAT128);
2870 FMOVQCC(3);
2871 break;
2872 #undef FMOVSCC
2873 #undef FMOVDCC
2874 #undef FMOVQCC
2875 #define FMOVSCC(icc) \
2877 TCGv r_cond; \
2878 int l1; \
2880 l1 = gen_new_label(); \
2881 r_cond = tcg_temp_new(); \
2882 cond = GET_FIELD_SP(insn, 14, 17); \
2883 gen_cond(r_cond, icc, cond, dc); \
2884 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2885 0, l1); \
2886 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2887 gen_update_fprs_dirty(rd); \
2888 gen_set_label(l1); \
2889 tcg_temp_free(r_cond); \
2891 #define FMOVDCC(icc) \
2893 TCGv r_cond; \
2894 int l1; \
2896 l1 = gen_new_label(); \
2897 r_cond = tcg_temp_new(); \
2898 cond = GET_FIELD_SP(insn, 14, 17); \
2899 gen_cond(r_cond, icc, cond, dc); \
2900 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2901 0, l1); \
2902 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2903 cpu_fpr[DFPREG(rs2)]); \
2904 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2905 cpu_fpr[DFPREG(rs2) + 1]); \
2906 gen_update_fprs_dirty(DFPREG(rd)); \
2907 gen_set_label(l1); \
2908 tcg_temp_free(r_cond); \
2910 #define FMOVQCC(icc) \
2912 TCGv r_cond; \
2913 int l1; \
2915 l1 = gen_new_label(); \
2916 r_cond = tcg_temp_new(); \
2917 cond = GET_FIELD_SP(insn, 14, 17); \
2918 gen_cond(r_cond, icc, cond, dc); \
2919 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2920 0, l1); \
2921 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2922 cpu_fpr[QFPREG(rs2)]); \
2923 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2924 cpu_fpr[QFPREG(rs2) + 1]); \
2925 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2926 cpu_fpr[QFPREG(rs2) + 2]); \
2927 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2928 cpu_fpr[QFPREG(rs2) + 3]); \
2929 gen_update_fprs_dirty(QFPREG(rd)); \
2930 gen_set_label(l1); \
2931 tcg_temp_free(r_cond); \
2934 case 0x101: /* V9 fmovscc %icc */
2935 FMOVSCC(0);
2936 break;
2937 case 0x102: /* V9 fmovdcc %icc */
2938 FMOVDCC(0);
2939 break;
2940 case 0x103: /* V9 fmovqcc %icc */
2941 CHECK_FPU_FEATURE(dc, FLOAT128);
2942 FMOVQCC(0);
2943 break;
2944 case 0x181: /* V9 fmovscc %xcc */
2945 FMOVSCC(1);
2946 break;
2947 case 0x182: /* V9 fmovdcc %xcc */
2948 FMOVDCC(1);
2949 break;
2950 case 0x183: /* V9 fmovqcc %xcc */
2951 CHECK_FPU_FEATURE(dc, FLOAT128);
2952 FMOVQCC(1);
2953 break;
2954 #undef FMOVSCC
2955 #undef FMOVDCC
2956 #undef FMOVQCC
2957 #endif
2958 case 0x51: /* fcmps, V9 %fcc */
2959 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2960 break;
2961 case 0x52: /* fcmpd, V9 %fcc */
2962 gen_op_load_fpr_DT0(DFPREG(rs1));
2963 gen_op_load_fpr_DT1(DFPREG(rs2));
2964 gen_op_fcmpd(rd & 3);
2965 break;
2966 case 0x53: /* fcmpq, V9 %fcc */
2967 CHECK_FPU_FEATURE(dc, FLOAT128);
2968 gen_op_load_fpr_QT0(QFPREG(rs1));
2969 gen_op_load_fpr_QT1(QFPREG(rs2));
2970 gen_op_fcmpq(rd & 3);
2971 break;
2972 case 0x55: /* fcmpes, V9 %fcc */
2973 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2974 break;
2975 case 0x56: /* fcmped, V9 %fcc */
2976 gen_op_load_fpr_DT0(DFPREG(rs1));
2977 gen_op_load_fpr_DT1(DFPREG(rs2));
2978 gen_op_fcmped(rd & 3);
2979 break;
2980 case 0x57: /* fcmpeq, V9 %fcc */
2981 CHECK_FPU_FEATURE(dc, FLOAT128);
2982 gen_op_load_fpr_QT0(QFPREG(rs1));
2983 gen_op_load_fpr_QT1(QFPREG(rs2));
2984 gen_op_fcmpeq(rd & 3);
2985 break;
2986 default:
2987 goto illegal_insn;
2989 } else if (xop == 0x2) {
2990 // clr/mov shortcut
2992 rs1 = GET_FIELD(insn, 13, 17);
2993 if (rs1 == 0) {
2994 // or %g0, x, y -> mov T0, x; mov y, T0
2995 if (IS_IMM) { /* immediate */
2996 TCGv r_const;
2998 simm = GET_FIELDs(insn, 19, 31);
2999 r_const = tcg_const_tl(simm);
3000 gen_movl_TN_reg(rd, r_const);
3001 tcg_temp_free(r_const);
3002 } else { /* register */
3003 rs2 = GET_FIELD(insn, 27, 31);
3004 gen_movl_reg_TN(rs2, cpu_dst);
3005 gen_movl_TN_reg(rd, cpu_dst);
3007 } else {
3008 cpu_src1 = get_src1(insn, cpu_src1);
3009 if (IS_IMM) { /* immediate */
3010 simm = GET_FIELDs(insn, 19, 31);
3011 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3012 gen_movl_TN_reg(rd, cpu_dst);
3013 } else { /* register */
3014 // or x, %g0, y -> mov T1, x; mov y, T1
3015 rs2 = GET_FIELD(insn, 27, 31);
3016 if (rs2 != 0) {
3017 gen_movl_reg_TN(rs2, cpu_src2);
3018 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3019 gen_movl_TN_reg(rd, cpu_dst);
3020 } else
3021 gen_movl_TN_reg(rd, cpu_src1);
3024 #ifdef TARGET_SPARC64
3025 } else if (xop == 0x25) { /* sll, V9 sllx */
3026 cpu_src1 = get_src1(insn, cpu_src1);
3027 if (IS_IMM) { /* immediate */
3028 simm = GET_FIELDs(insn, 20, 31);
3029 if (insn & (1 << 12)) {
3030 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3031 } else {
3032 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3034 } else { /* register */
3035 rs2 = GET_FIELD(insn, 27, 31);
3036 gen_movl_reg_TN(rs2, cpu_src2);
3037 if (insn & (1 << 12)) {
3038 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3039 } else {
3040 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3042 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3044 gen_movl_TN_reg(rd, cpu_dst);
3045 } else if (xop == 0x26) { /* srl, V9 srlx */
3046 cpu_src1 = get_src1(insn, cpu_src1);
3047 if (IS_IMM) { /* immediate */
3048 simm = GET_FIELDs(insn, 20, 31);
3049 if (insn & (1 << 12)) {
3050 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3051 } else {
3052 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3053 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3055 } else { /* register */
3056 rs2 = GET_FIELD(insn, 27, 31);
3057 gen_movl_reg_TN(rs2, cpu_src2);
3058 if (insn & (1 << 12)) {
3059 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3060 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3061 } else {
3062 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3063 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3064 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3067 gen_movl_TN_reg(rd, cpu_dst);
3068 } else if (xop == 0x27) { /* sra, V9 srax */
3069 cpu_src1 = get_src1(insn, cpu_src1);
3070 if (IS_IMM) { /* immediate */
3071 simm = GET_FIELDs(insn, 20, 31);
3072 if (insn & (1 << 12)) {
3073 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3074 } else {
3075 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3077 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3079 } else { /* register */
3080 rs2 = GET_FIELD(insn, 27, 31);
3081 gen_movl_reg_TN(rs2, cpu_src2);
3082 if (insn & (1 << 12)) {
3083 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3084 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3085 } else {
3086 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3087 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3088 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3089 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3092 gen_movl_TN_reg(rd, cpu_dst);
3093 #endif
3094 } else if (xop < 0x36) {
3095 if (xop < 0x20) {
3096 cpu_src1 = get_src1(insn, cpu_src1);
3097 cpu_src2 = get_src2(insn, cpu_src2);
3098 switch (xop & ~0x10) {
3099 case 0x0: /* add */
3100 if (IS_IMM) {
3101 simm = GET_FIELDs(insn, 19, 31);
3102 if (xop & 0x10) {
3103 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3104 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3105 dc->cc_op = CC_OP_ADD;
3106 } else {
3107 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3109 } else {
3110 if (xop & 0x10) {
3111 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3112 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3113 dc->cc_op = CC_OP_ADD;
3114 } else {
3115 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3118 break;
3119 case 0x1: /* and */
3120 if (IS_IMM) {
3121 simm = GET_FIELDs(insn, 19, 31);
3122 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3123 } else {
3124 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3126 if (xop & 0x10) {
3127 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3128 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3129 dc->cc_op = CC_OP_LOGIC;
3131 break;
3132 case 0x2: /* or */
3133 if (IS_IMM) {
3134 simm = GET_FIELDs(insn, 19, 31);
3135 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3136 } else {
3137 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3139 if (xop & 0x10) {
3140 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3141 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3142 dc->cc_op = CC_OP_LOGIC;
3144 break;
3145 case 0x3: /* xor */
3146 if (IS_IMM) {
3147 simm = GET_FIELDs(insn, 19, 31);
3148 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3149 } else {
3150 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3152 if (xop & 0x10) {
3153 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3154 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3155 dc->cc_op = CC_OP_LOGIC;
3157 break;
3158 case 0x4: /* sub */
3159 if (IS_IMM) {
3160 simm = GET_FIELDs(insn, 19, 31);
3161 if (xop & 0x10) {
3162 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3163 } else {
3164 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3166 } else {
3167 if (xop & 0x10) {
3168 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3169 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3170 dc->cc_op = CC_OP_SUB;
3171 } else {
3172 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3175 break;
3176 case 0x5: /* andn */
3177 if (IS_IMM) {
3178 simm = GET_FIELDs(insn, 19, 31);
3179 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3180 } else {
3181 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3183 if (xop & 0x10) {
3184 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3185 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3186 dc->cc_op = CC_OP_LOGIC;
3188 break;
3189 case 0x6: /* orn */
3190 if (IS_IMM) {
3191 simm = GET_FIELDs(insn, 19, 31);
3192 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3193 } else {
3194 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3196 if (xop & 0x10) {
3197 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3198 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3199 dc->cc_op = CC_OP_LOGIC;
3201 break;
3202 case 0x7: /* xorn */
3203 if (IS_IMM) {
3204 simm = GET_FIELDs(insn, 19, 31);
3205 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3206 } else {
3207 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3208 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3210 if (xop & 0x10) {
3211 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3212 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3213 dc->cc_op = CC_OP_LOGIC;
3215 break;
3216 case 0x8: /* addx, V9 addc */
3217 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3218 (xop & 0x10));
3219 break;
3220 #ifdef TARGET_SPARC64
3221 case 0x9: /* V9 mulx */
3222 if (IS_IMM) {
3223 simm = GET_FIELDs(insn, 19, 31);
3224 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3225 } else {
3226 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3228 break;
3229 #endif
3230 case 0xa: /* umul */
3231 CHECK_IU_FEATURE(dc, MUL);
3232 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3233 if (xop & 0x10) {
3234 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3235 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3236 dc->cc_op = CC_OP_LOGIC;
3238 break;
3239 case 0xb: /* smul */
3240 CHECK_IU_FEATURE(dc, MUL);
3241 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3242 if (xop & 0x10) {
3243 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3244 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3245 dc->cc_op = CC_OP_LOGIC;
3247 break;
3248 case 0xc: /* subx, V9 subc */
3249 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3250 (xop & 0x10));
3251 break;
3252 #ifdef TARGET_SPARC64
3253 case 0xd: /* V9 udivx */
3255 TCGv r_temp1, r_temp2;
3256 r_temp1 = tcg_temp_local_new();
3257 r_temp2 = tcg_temp_local_new();
3258 tcg_gen_mov_tl(r_temp1, cpu_src1);
3259 tcg_gen_mov_tl(r_temp2, cpu_src2);
3260 gen_trap_ifdivzero_tl(r_temp2);
3261 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3262 tcg_temp_free(r_temp1);
3263 tcg_temp_free(r_temp2);
3265 break;
3266 #endif
3267 case 0xe: /* udiv */
3268 CHECK_IU_FEATURE(dc, DIV);
3269 if (xop & 0x10) {
3270 gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3271 dc->cc_op = CC_OP_DIV;
3272 } else {
3273 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3275 break;
3276 case 0xf: /* sdiv */
3277 CHECK_IU_FEATURE(dc, DIV);
3278 if (xop & 0x10) {
3279 gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3280 dc->cc_op = CC_OP_DIV;
3281 } else {
3282 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3284 break;
3285 default:
3286 goto illegal_insn;
3288 gen_movl_TN_reg(rd, cpu_dst);
3289 } else {
3290 cpu_src1 = get_src1(insn, cpu_src1);
3291 cpu_src2 = get_src2(insn, cpu_src2);
3292 switch (xop) {
3293 case 0x20: /* taddcc */
3294 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3295 gen_movl_TN_reg(rd, cpu_dst);
3296 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3297 dc->cc_op = CC_OP_TADD;
3298 break;
3299 case 0x21: /* tsubcc */
3300 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3301 gen_movl_TN_reg(rd, cpu_dst);
3302 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3303 dc->cc_op = CC_OP_TSUB;
3304 break;
3305 case 0x22: /* taddcctv */
3306 save_state(dc, cpu_cond);
3307 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3308 gen_movl_TN_reg(rd, cpu_dst);
3309 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3310 dc->cc_op = CC_OP_TADDTV;
3311 break;
3312 case 0x23: /* tsubcctv */
3313 save_state(dc, cpu_cond);
3314 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3315 gen_movl_TN_reg(rd, cpu_dst);
3316 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3317 dc->cc_op = CC_OP_TSUBTV;
3318 break;
3319 case 0x24: /* mulscc */
3320 gen_helper_compute_psr();
3321 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3322 gen_movl_TN_reg(rd, cpu_dst);
3323 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3324 dc->cc_op = CC_OP_ADD;
3325 break;
3326 #ifndef TARGET_SPARC64
3327 case 0x25: /* sll */
3328 if (IS_IMM) { /* immediate */
3329 simm = GET_FIELDs(insn, 20, 31);
3330 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3331 } else { /* register */
3332 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3333 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3335 gen_movl_TN_reg(rd, cpu_dst);
3336 break;
3337 case 0x26: /* srl */
3338 if (IS_IMM) { /* immediate */
3339 simm = GET_FIELDs(insn, 20, 31);
3340 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3341 } else { /* register */
3342 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3343 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3345 gen_movl_TN_reg(rd, cpu_dst);
3346 break;
3347 case 0x27: /* sra */
3348 if (IS_IMM) { /* immediate */
3349 simm = GET_FIELDs(insn, 20, 31);
3350 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3351 } else { /* register */
3352 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3353 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3355 gen_movl_TN_reg(rd, cpu_dst);
3356 break;
3357 #endif
3358 case 0x30:
3360 switch(rd) {
3361 case 0: /* wry */
3362 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3363 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3364 break;
3365 #ifndef TARGET_SPARC64
3366 case 0x01 ... 0x0f: /* undefined in the
3367 SPARCv8 manual, nop
3368 on the microSPARC
3369 II */
3370 case 0x10 ... 0x1f: /* implementation-dependent
3371 in the SPARCv8
3372 manual, nop on the
3373 microSPARC II */
3374 break;
3375 #else
3376 case 0x2: /* V9 wrccr */
3377 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3378 gen_helper_wrccr(cpu_dst);
3379 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3380 dc->cc_op = CC_OP_FLAGS;
3381 break;
3382 case 0x3: /* V9 wrasi */
3383 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3384 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3385 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3386 break;
3387 case 0x6: /* V9 wrfprs */
3388 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3389 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3390 save_state(dc, cpu_cond);
3391 gen_op_next_insn();
3392 tcg_gen_exit_tb(0);
3393 dc->is_br = 1;
3394 break;
3395 case 0xf: /* V9 sir, nop if user */
3396 #if !defined(CONFIG_USER_ONLY)
3397 if (supervisor(dc)) {
3398 ; // XXX
3400 #endif
3401 break;
3402 case 0x13: /* Graphics Status */
3403 if (gen_trap_ifnofpu(dc, cpu_cond))
3404 goto jmp_insn;
3405 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3406 break;
3407 case 0x14: /* Softint set */
3408 if (!supervisor(dc))
3409 goto illegal_insn;
3410 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3411 gen_helper_set_softint(cpu_tmp64);
3412 break;
3413 case 0x15: /* Softint clear */
3414 if (!supervisor(dc))
3415 goto illegal_insn;
3416 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3417 gen_helper_clear_softint(cpu_tmp64);
3418 break;
3419 case 0x16: /* Softint write */
3420 if (!supervisor(dc))
3421 goto illegal_insn;
3422 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3423 gen_helper_write_softint(cpu_tmp64);
3424 break;
3425 case 0x17: /* Tick compare */
3426 #if !defined(CONFIG_USER_ONLY)
3427 if (!supervisor(dc))
3428 goto illegal_insn;
3429 #endif
3431 TCGv_ptr r_tickptr;
3433 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3434 cpu_src2);
3435 r_tickptr = tcg_temp_new_ptr();
3436 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3437 offsetof(CPUState, tick));
3438 gen_helper_tick_set_limit(r_tickptr,
3439 cpu_tick_cmpr);
3440 tcg_temp_free_ptr(r_tickptr);
3442 break;
3443 case 0x18: /* System tick */
3444 #if !defined(CONFIG_USER_ONLY)
3445 if (!supervisor(dc))
3446 goto illegal_insn;
3447 #endif
3449 TCGv_ptr r_tickptr;
3451 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3452 cpu_src2);
3453 r_tickptr = tcg_temp_new_ptr();
3454 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3455 offsetof(CPUState, stick));
3456 gen_helper_tick_set_count(r_tickptr,
3457 cpu_dst);
3458 tcg_temp_free_ptr(r_tickptr);
3460 break;
3461 case 0x19: /* System tick compare */
3462 #if !defined(CONFIG_USER_ONLY)
3463 if (!supervisor(dc))
3464 goto illegal_insn;
3465 #endif
3467 TCGv_ptr r_tickptr;
3469 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3470 cpu_src2);
3471 r_tickptr = tcg_temp_new_ptr();
3472 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3473 offsetof(CPUState, stick));
3474 gen_helper_tick_set_limit(r_tickptr,
3475 cpu_stick_cmpr);
3476 tcg_temp_free_ptr(r_tickptr);
3478 break;
3480 case 0x10: /* Performance Control */
3481 case 0x11: /* Performance Instrumentation
3482 Counter */
3483 case 0x12: /* Dispatch Control */
3484 #endif
3485 default:
3486 goto illegal_insn;
3489 break;
3490 #if !defined(CONFIG_USER_ONLY)
3491 case 0x31: /* wrpsr, V9 saved, restored */
3493 if (!supervisor(dc))
3494 goto priv_insn;
3495 #ifdef TARGET_SPARC64
3496 switch (rd) {
3497 case 0:
3498 gen_helper_saved();
3499 break;
3500 case 1:
3501 gen_helper_restored();
3502 break;
3503 case 2: /* UA2005 allclean */
3504 case 3: /* UA2005 otherw */
3505 case 4: /* UA2005 normalw */
3506 case 5: /* UA2005 invalw */
3507 // XXX
3508 default:
3509 goto illegal_insn;
3511 #else
3512 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3513 gen_helper_wrpsr(cpu_dst);
3514 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3515 dc->cc_op = CC_OP_FLAGS;
3516 save_state(dc, cpu_cond);
3517 gen_op_next_insn();
3518 tcg_gen_exit_tb(0);
3519 dc->is_br = 1;
3520 #endif
3522 break;
3523 case 0x32: /* wrwim, V9 wrpr */
3525 if (!supervisor(dc))
3526 goto priv_insn;
3527 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3528 #ifdef TARGET_SPARC64
3529 switch (rd) {
3530 case 0: // tpc
3532 TCGv_ptr r_tsptr;
3534 r_tsptr = tcg_temp_new_ptr();
3535 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3536 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3537 offsetof(trap_state, tpc));
3538 tcg_temp_free_ptr(r_tsptr);
3540 break;
3541 case 1: // tnpc
3543 TCGv_ptr r_tsptr;
3545 r_tsptr = tcg_temp_new_ptr();
3546 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3547 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3548 offsetof(trap_state, tnpc));
3549 tcg_temp_free_ptr(r_tsptr);
3551 break;
3552 case 2: // tstate
3554 TCGv_ptr r_tsptr;
3556 r_tsptr = tcg_temp_new_ptr();
3557 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3558 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3559 offsetof(trap_state,
3560 tstate));
3561 tcg_temp_free_ptr(r_tsptr);
3563 break;
3564 case 3: // tt
3566 TCGv_ptr r_tsptr;
3568 r_tsptr = tcg_temp_new_ptr();
3569 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3570 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3571 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3572 offsetof(trap_state, tt));
3573 tcg_temp_free_ptr(r_tsptr);
3575 break;
3576 case 4: // tick
3578 TCGv_ptr r_tickptr;
3580 r_tickptr = tcg_temp_new_ptr();
3581 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3582 offsetof(CPUState, tick));
3583 gen_helper_tick_set_count(r_tickptr,
3584 cpu_tmp0);
3585 tcg_temp_free_ptr(r_tickptr);
3587 break;
3588 case 5: // tba
3589 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3590 break;
3591 case 6: // pstate
3593 TCGv r_tmp = tcg_temp_local_new();
3595 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3596 save_state(dc, cpu_cond);
3597 gen_helper_wrpstate(r_tmp);
3598 tcg_temp_free(r_tmp);
3599 dc->npc = DYNAMIC_PC;
3601 break;
3602 case 7: // tl
3604 TCGv r_tmp = tcg_temp_local_new();
3606 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3607 save_state(dc, cpu_cond);
3608 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3609 tcg_temp_free(r_tmp);
3610 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3611 offsetof(CPUSPARCState, tl));
3612 dc->npc = DYNAMIC_PC;
3614 break;
3615 case 8: // pil
3616 gen_helper_wrpil(cpu_tmp0);
3617 break;
3618 case 9: // cwp
3619 gen_helper_wrcwp(cpu_tmp0);
3620 break;
3621 case 10: // cansave
3622 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3623 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3624 offsetof(CPUSPARCState,
3625 cansave));
3626 break;
3627 case 11: // canrestore
3628 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3629 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3630 offsetof(CPUSPARCState,
3631 canrestore));
3632 break;
3633 case 12: // cleanwin
3634 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3635 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3636 offsetof(CPUSPARCState,
3637 cleanwin));
3638 break;
3639 case 13: // otherwin
3640 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3641 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3642 offsetof(CPUSPARCState,
3643 otherwin));
3644 break;
3645 case 14: // wstate
3646 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3647 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3648 offsetof(CPUSPARCState,
3649 wstate));
3650 break;
3651 case 16: // UA2005 gl
3652 CHECK_IU_FEATURE(dc, GL);
3653 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3654 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3655 offsetof(CPUSPARCState, gl));
3656 break;
3657 case 26: // UA2005 strand status
3658 CHECK_IU_FEATURE(dc, HYPV);
3659 if (!hypervisor(dc))
3660 goto priv_insn;
3661 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3662 break;
3663 default:
3664 goto illegal_insn;
3666 #else
3667 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3668 if (dc->def->nwindows != 32)
3669 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3670 (1 << dc->def->nwindows) - 1);
3671 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3672 #endif
3674 break;
3675 case 0x33: /* wrtbr, UA2005 wrhpr */
3677 #ifndef TARGET_SPARC64
3678 if (!supervisor(dc))
3679 goto priv_insn;
3680 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3681 #else
3682 CHECK_IU_FEATURE(dc, HYPV);
3683 if (!hypervisor(dc))
3684 goto priv_insn;
3685 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3686 switch (rd) {
3687 case 0: // hpstate
3688 // XXX gen_op_wrhpstate();
3689 save_state(dc, cpu_cond);
3690 gen_op_next_insn();
3691 tcg_gen_exit_tb(0);
3692 dc->is_br = 1;
3693 break;
3694 case 1: // htstate
3695 // XXX gen_op_wrhtstate();
3696 break;
3697 case 3: // hintp
3698 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3699 break;
3700 case 5: // htba
3701 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3702 break;
3703 case 31: // hstick_cmpr
3705 TCGv_ptr r_tickptr;
3707 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3708 r_tickptr = tcg_temp_new_ptr();
3709 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3710 offsetof(CPUState, hstick));
3711 gen_helper_tick_set_limit(r_tickptr,
3712 cpu_hstick_cmpr);
3713 tcg_temp_free_ptr(r_tickptr);
3715 break;
3716 case 6: // hver readonly
3717 default:
3718 goto illegal_insn;
3720 #endif
3722 break;
3723 #endif
3724 #ifdef TARGET_SPARC64
3725 case 0x2c: /* V9 movcc */
3727 int cc = GET_FIELD_SP(insn, 11, 12);
3728 int cond = GET_FIELD_SP(insn, 14, 17);
3729 TCGv r_cond;
3730 int l1;
3732 r_cond = tcg_temp_new();
3733 if (insn & (1 << 18)) {
3734 if (cc == 0)
3735 gen_cond(r_cond, 0, cond, dc);
3736 else if (cc == 2)
3737 gen_cond(r_cond, 1, cond, dc);
3738 else
3739 goto illegal_insn;
3740 } else {
3741 gen_fcond(r_cond, cc, cond);
3744 l1 = gen_new_label();
3746 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3747 if (IS_IMM) { /* immediate */
3748 TCGv r_const;
3750 simm = GET_FIELD_SPs(insn, 0, 10);
3751 r_const = tcg_const_tl(simm);
3752 gen_movl_TN_reg(rd, r_const);
3753 tcg_temp_free(r_const);
3754 } else {
3755 rs2 = GET_FIELD_SP(insn, 0, 4);
3756 gen_movl_reg_TN(rs2, cpu_tmp0);
3757 gen_movl_TN_reg(rd, cpu_tmp0);
3759 gen_set_label(l1);
3760 tcg_temp_free(r_cond);
3761 break;
3763 case 0x2d: /* V9 sdivx */
3764 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3765 gen_movl_TN_reg(rd, cpu_dst);
3766 break;
3767 case 0x2e: /* V9 popc */
3769 cpu_src2 = get_src2(insn, cpu_src2);
3770 gen_helper_popc(cpu_dst, cpu_src2);
3771 gen_movl_TN_reg(rd, cpu_dst);
3773 case 0x2f: /* V9 movr */
3775 int cond = GET_FIELD_SP(insn, 10, 12);
3776 int l1;
3778 cpu_src1 = get_src1(insn, cpu_src1);
3780 l1 = gen_new_label();
3782 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3783 cpu_src1, 0, l1);
3784 if (IS_IMM) { /* immediate */
3785 TCGv r_const;
3787 simm = GET_FIELD_SPs(insn, 0, 9);
3788 r_const = tcg_const_tl(simm);
3789 gen_movl_TN_reg(rd, r_const);
3790 tcg_temp_free(r_const);
3791 } else {
3792 rs2 = GET_FIELD_SP(insn, 0, 4);
3793 gen_movl_reg_TN(rs2, cpu_tmp0);
3794 gen_movl_TN_reg(rd, cpu_tmp0);
3796 gen_set_label(l1);
3797 break;
3799 #endif
3800 default:
3801 goto illegal_insn;
3804 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3805 #ifdef TARGET_SPARC64
3806 int opf = GET_FIELD_SP(insn, 5, 13);
3807 rs1 = GET_FIELD(insn, 13, 17);
3808 rs2 = GET_FIELD(insn, 27, 31);
3809 if (gen_trap_ifnofpu(dc, cpu_cond))
3810 goto jmp_insn;
3812 switch (opf) {
3813 case 0x000: /* VIS I edge8cc */
3814 case 0x001: /* VIS II edge8n */
3815 case 0x002: /* VIS I edge8lcc */
3816 case 0x003: /* VIS II edge8ln */
3817 case 0x004: /* VIS I edge16cc */
3818 case 0x005: /* VIS II edge16n */
3819 case 0x006: /* VIS I edge16lcc */
3820 case 0x007: /* VIS II edge16ln */
3821 case 0x008: /* VIS I edge32cc */
3822 case 0x009: /* VIS II edge32n */
3823 case 0x00a: /* VIS I edge32lcc */
3824 case 0x00b: /* VIS II edge32ln */
3825 // XXX
3826 goto illegal_insn;
3827 case 0x010: /* VIS I array8 */
3828 CHECK_FPU_FEATURE(dc, VIS1);
3829 cpu_src1 = get_src1(insn, cpu_src1);
3830 gen_movl_reg_TN(rs2, cpu_src2);
3831 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3832 gen_movl_TN_reg(rd, cpu_dst);
3833 break;
3834 case 0x012: /* VIS I array16 */
3835 CHECK_FPU_FEATURE(dc, VIS1);
3836 cpu_src1 = get_src1(insn, cpu_src1);
3837 gen_movl_reg_TN(rs2, cpu_src2);
3838 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3839 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3840 gen_movl_TN_reg(rd, cpu_dst);
3841 break;
3842 case 0x014: /* VIS I array32 */
3843 CHECK_FPU_FEATURE(dc, VIS1);
3844 cpu_src1 = get_src1(insn, cpu_src1);
3845 gen_movl_reg_TN(rs2, cpu_src2);
3846 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3847 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3848 gen_movl_TN_reg(rd, cpu_dst);
3849 break;
3850 case 0x018: /* VIS I alignaddr */
3851 CHECK_FPU_FEATURE(dc, VIS1);
3852 cpu_src1 = get_src1(insn, cpu_src1);
3853 gen_movl_reg_TN(rs2, cpu_src2);
3854 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3855 gen_movl_TN_reg(rd, cpu_dst);
3856 break;
3857 case 0x019: /* VIS II bmask */
3858 case 0x01a: /* VIS I alignaddrl */
3859 // XXX
3860 goto illegal_insn;
3861 case 0x020: /* VIS I fcmple16 */
3862 CHECK_FPU_FEATURE(dc, VIS1);
3863 gen_op_load_fpr_DT0(DFPREG(rs1));
3864 gen_op_load_fpr_DT1(DFPREG(rs2));
3865 gen_helper_fcmple16(cpu_dst);
3866 gen_movl_TN_reg(rd, cpu_dst);
3867 break;
3868 case 0x022: /* VIS I fcmpne16 */
3869 CHECK_FPU_FEATURE(dc, VIS1);
3870 gen_op_load_fpr_DT0(DFPREG(rs1));
3871 gen_op_load_fpr_DT1(DFPREG(rs2));
3872 gen_helper_fcmpne16(cpu_dst);
3873 gen_movl_TN_reg(rd, cpu_dst);
3874 break;
3875 case 0x024: /* VIS I fcmple32 */
3876 CHECK_FPU_FEATURE(dc, VIS1);
3877 gen_op_load_fpr_DT0(DFPREG(rs1));
3878 gen_op_load_fpr_DT1(DFPREG(rs2));
3879 gen_helper_fcmple32(cpu_dst);
3880 gen_movl_TN_reg(rd, cpu_dst);
3881 break;
3882 case 0x026: /* VIS I fcmpne32 */
3883 CHECK_FPU_FEATURE(dc, VIS1);
3884 gen_op_load_fpr_DT0(DFPREG(rs1));
3885 gen_op_load_fpr_DT1(DFPREG(rs2));
3886 gen_helper_fcmpne32(cpu_dst);
3887 gen_movl_TN_reg(rd, cpu_dst);
3888 break;
3889 case 0x028: /* VIS I fcmpgt16 */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 gen_op_load_fpr_DT0(DFPREG(rs1));
3892 gen_op_load_fpr_DT1(DFPREG(rs2));
3893 gen_helper_fcmpgt16(cpu_dst);
3894 gen_movl_TN_reg(rd, cpu_dst);
3895 break;
3896 case 0x02a: /* VIS I fcmpeq16 */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_op_load_fpr_DT0(DFPREG(rs1));
3899 gen_op_load_fpr_DT1(DFPREG(rs2));
3900 gen_helper_fcmpeq16(cpu_dst);
3901 gen_movl_TN_reg(rd, cpu_dst);
3902 break;
3903 case 0x02c: /* VIS I fcmpgt32 */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 gen_op_load_fpr_DT0(DFPREG(rs1));
3906 gen_op_load_fpr_DT1(DFPREG(rs2));
3907 gen_helper_fcmpgt32(cpu_dst);
3908 gen_movl_TN_reg(rd, cpu_dst);
3909 break;
3910 case 0x02e: /* VIS I fcmpeq32 */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 gen_op_load_fpr_DT0(DFPREG(rs1));
3913 gen_op_load_fpr_DT1(DFPREG(rs2));
3914 gen_helper_fcmpeq32(cpu_dst);
3915 gen_movl_TN_reg(rd, cpu_dst);
3916 break;
3917 case 0x031: /* VIS I fmul8x16 */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 gen_op_load_fpr_DT0(DFPREG(rs1));
3920 gen_op_load_fpr_DT1(DFPREG(rs2));
3921 gen_helper_fmul8x16();
3922 gen_op_store_DT0_fpr(DFPREG(rd));
3923 gen_update_fprs_dirty(DFPREG(rd));
3924 break;
3925 case 0x033: /* VIS I fmul8x16au */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 gen_op_load_fpr_DT0(DFPREG(rs1));
3928 gen_op_load_fpr_DT1(DFPREG(rs2));
3929 gen_helper_fmul8x16au();
3930 gen_op_store_DT0_fpr(DFPREG(rd));
3931 gen_update_fprs_dirty(DFPREG(rd));
3932 break;
3933 case 0x035: /* VIS I fmul8x16al */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 gen_op_load_fpr_DT0(DFPREG(rs1));
3936 gen_op_load_fpr_DT1(DFPREG(rs2));
3937 gen_helper_fmul8x16al();
3938 gen_op_store_DT0_fpr(DFPREG(rd));
3939 gen_update_fprs_dirty(DFPREG(rd));
3940 break;
3941 case 0x036: /* VIS I fmul8sux16 */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 gen_op_load_fpr_DT0(DFPREG(rs1));
3944 gen_op_load_fpr_DT1(DFPREG(rs2));
3945 gen_helper_fmul8sux16();
3946 gen_op_store_DT0_fpr(DFPREG(rd));
3947 gen_update_fprs_dirty(DFPREG(rd));
3948 break;
3949 case 0x037: /* VIS I fmul8ulx16 */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 gen_op_load_fpr_DT0(DFPREG(rs1));
3952 gen_op_load_fpr_DT1(DFPREG(rs2));
3953 gen_helper_fmul8ulx16();
3954 gen_op_store_DT0_fpr(DFPREG(rd));
3955 gen_update_fprs_dirty(DFPREG(rd));
3956 break;
3957 case 0x038: /* VIS I fmuld8sux16 */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 gen_op_load_fpr_DT0(DFPREG(rs1));
3960 gen_op_load_fpr_DT1(DFPREG(rs2));
3961 gen_helper_fmuld8sux16();
3962 gen_op_store_DT0_fpr(DFPREG(rd));
3963 gen_update_fprs_dirty(DFPREG(rd));
3964 break;
3965 case 0x039: /* VIS I fmuld8ulx16 */
3966 CHECK_FPU_FEATURE(dc, VIS1);
3967 gen_op_load_fpr_DT0(DFPREG(rs1));
3968 gen_op_load_fpr_DT1(DFPREG(rs2));
3969 gen_helper_fmuld8ulx16();
3970 gen_op_store_DT0_fpr(DFPREG(rd));
3971 gen_update_fprs_dirty(DFPREG(rd));
3972 break;
3973 case 0x03a: /* VIS I fpack32 */
3974 case 0x03b: /* VIS I fpack16 */
3975 case 0x03d: /* VIS I fpackfix */
3976 case 0x03e: /* VIS I pdist */
3977 // XXX
3978 goto illegal_insn;
3979 case 0x048: /* VIS I faligndata */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 gen_op_load_fpr_DT0(DFPREG(rs1));
3982 gen_op_load_fpr_DT1(DFPREG(rs2));
3983 gen_helper_faligndata();
3984 gen_op_store_DT0_fpr(DFPREG(rd));
3985 gen_update_fprs_dirty(DFPREG(rd));
3986 break;
3987 case 0x04b: /* VIS I fpmerge */
3988 CHECK_FPU_FEATURE(dc, VIS1);
3989 gen_op_load_fpr_DT0(DFPREG(rs1));
3990 gen_op_load_fpr_DT1(DFPREG(rs2));
3991 gen_helper_fpmerge();
3992 gen_op_store_DT0_fpr(DFPREG(rd));
3993 gen_update_fprs_dirty(DFPREG(rd));
3994 break;
3995 case 0x04c: /* VIS II bshuffle */
3996 // XXX
3997 goto illegal_insn;
3998 case 0x04d: /* VIS I fexpand */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 gen_op_load_fpr_DT0(DFPREG(rs1));
4001 gen_op_load_fpr_DT1(DFPREG(rs2));
4002 gen_helper_fexpand();
4003 gen_op_store_DT0_fpr(DFPREG(rd));
4004 gen_update_fprs_dirty(DFPREG(rd));
4005 break;
4006 case 0x050: /* VIS I fpadd16 */
4007 CHECK_FPU_FEATURE(dc, VIS1);
4008 gen_op_load_fpr_DT0(DFPREG(rs1));
4009 gen_op_load_fpr_DT1(DFPREG(rs2));
4010 gen_helper_fpadd16();
4011 gen_op_store_DT0_fpr(DFPREG(rd));
4012 gen_update_fprs_dirty(DFPREG(rd));
4013 break;
4014 case 0x051: /* VIS I fpadd16s */
4015 CHECK_FPU_FEATURE(dc, VIS1);
4016 gen_helper_fpadd16s(cpu_fpr[rd],
4017 cpu_fpr[rs1], cpu_fpr[rs2]);
4018 gen_update_fprs_dirty(rd);
4019 break;
4020 case 0x052: /* VIS I fpadd32 */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 gen_op_load_fpr_DT0(DFPREG(rs1));
4023 gen_op_load_fpr_DT1(DFPREG(rs2));
4024 gen_helper_fpadd32();
4025 gen_op_store_DT0_fpr(DFPREG(rd));
4026 gen_update_fprs_dirty(DFPREG(rd));
4027 break;
4028 case 0x053: /* VIS I fpadd32s */
4029 CHECK_FPU_FEATURE(dc, VIS1);
4030 gen_helper_fpadd32s(cpu_fpr[rd],
4031 cpu_fpr[rs1], cpu_fpr[rs2]);
4032 gen_update_fprs_dirty(rd);
4033 break;
4034 case 0x054: /* VIS I fpsub16 */
4035 CHECK_FPU_FEATURE(dc, VIS1);
4036 gen_op_load_fpr_DT0(DFPREG(rs1));
4037 gen_op_load_fpr_DT1(DFPREG(rs2));
4038 gen_helper_fpsub16();
4039 gen_op_store_DT0_fpr(DFPREG(rd));
4040 gen_update_fprs_dirty(DFPREG(rd));
4041 break;
4042 case 0x055: /* VIS I fpsub16s */
4043 CHECK_FPU_FEATURE(dc, VIS1);
4044 gen_helper_fpsub16s(cpu_fpr[rd],
4045 cpu_fpr[rs1], cpu_fpr[rs2]);
4046 gen_update_fprs_dirty(rd);
4047 break;
4048 case 0x056: /* VIS I fpsub32 */
4049 CHECK_FPU_FEATURE(dc, VIS1);
4050 gen_op_load_fpr_DT0(DFPREG(rs1));
4051 gen_op_load_fpr_DT1(DFPREG(rs2));
4052 gen_helper_fpsub32();
4053 gen_op_store_DT0_fpr(DFPREG(rd));
4054 gen_update_fprs_dirty(DFPREG(rd));
4055 break;
4056 case 0x057: /* VIS I fpsub32s */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 gen_helper_fpsub32s(cpu_fpr[rd],
4059 cpu_fpr[rs1], cpu_fpr[rs2]);
4060 gen_update_fprs_dirty(rd);
4061 break;
4062 case 0x060: /* VIS I fzero */
4063 CHECK_FPU_FEATURE(dc, VIS1);
4064 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4065 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4066 gen_update_fprs_dirty(DFPREG(rd));
4067 break;
4068 case 0x061: /* VIS I fzeros */
4069 CHECK_FPU_FEATURE(dc, VIS1);
4070 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4071 gen_update_fprs_dirty(rd);
4072 break;
4073 case 0x062: /* VIS I fnor */
4074 CHECK_FPU_FEATURE(dc, VIS1);
4075 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4076 cpu_fpr[DFPREG(rs2)]);
4077 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
4078 cpu_fpr[DFPREG(rs1) + 1],
4079 cpu_fpr[DFPREG(rs2) + 1]);
4080 gen_update_fprs_dirty(DFPREG(rd));
4081 break;
4082 case 0x063: /* VIS I fnors */
4083 CHECK_FPU_FEATURE(dc, VIS1);
4084 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4085 gen_update_fprs_dirty(rd);
4086 break;
4087 case 0x064: /* VIS I fandnot2 */
4088 CHECK_FPU_FEATURE(dc, VIS1);
4089 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4090 cpu_fpr[DFPREG(rs2)]);
4091 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4092 cpu_fpr[DFPREG(rs1) + 1],
4093 cpu_fpr[DFPREG(rs2) + 1]);
4094 gen_update_fprs_dirty(DFPREG(rd));
4095 break;
4096 case 0x065: /* VIS I fandnot2s */
4097 CHECK_FPU_FEATURE(dc, VIS1);
4098 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4099 gen_update_fprs_dirty(rd);
4100 break;
4101 case 0x066: /* VIS I fnot2 */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4104 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4105 cpu_fpr[DFPREG(rs2) + 1]);
4106 gen_update_fprs_dirty(DFPREG(rd));
4107 break;
4108 case 0x067: /* VIS I fnot2s */
4109 CHECK_FPU_FEATURE(dc, VIS1);
4110 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4111 gen_update_fprs_dirty(rd);
4112 break;
4113 case 0x068: /* VIS I fandnot1 */
4114 CHECK_FPU_FEATURE(dc, VIS1);
4115 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4116 cpu_fpr[DFPREG(rs1)]);
4117 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4118 cpu_fpr[DFPREG(rs2) + 1],
4119 cpu_fpr[DFPREG(rs1) + 1]);
4120 gen_update_fprs_dirty(DFPREG(rd));
4121 break;
4122 case 0x069: /* VIS I fandnot1s */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4125 gen_update_fprs_dirty(rd);
4126 break;
4127 case 0x06a: /* VIS I fnot1 */
4128 CHECK_FPU_FEATURE(dc, VIS1);
4129 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4130 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4131 cpu_fpr[DFPREG(rs1) + 1]);
4132 gen_update_fprs_dirty(DFPREG(rd));
4133 break;
4134 case 0x06b: /* VIS I fnot1s */
4135 CHECK_FPU_FEATURE(dc, VIS1);
4136 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4137 gen_update_fprs_dirty(rd);
4138 break;
4139 case 0x06c: /* VIS I fxor */
4140 CHECK_FPU_FEATURE(dc, VIS1);
4141 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4142 cpu_fpr[DFPREG(rs2)]);
4143 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4144 cpu_fpr[DFPREG(rs1) + 1],
4145 cpu_fpr[DFPREG(rs2) + 1]);
4146 gen_update_fprs_dirty(DFPREG(rd));
4147 break;
4148 case 0x06d: /* VIS I fxors */
4149 CHECK_FPU_FEATURE(dc, VIS1);
4150 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4151 gen_update_fprs_dirty(rd);
4152 break;
4153 case 0x06e: /* VIS I fnand */
4154 CHECK_FPU_FEATURE(dc, VIS1);
4155 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4156 cpu_fpr[DFPREG(rs2)]);
4157 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
4158 cpu_fpr[DFPREG(rs1) + 1],
4159 cpu_fpr[DFPREG(rs2) + 1]);
4160 gen_update_fprs_dirty(DFPREG(rd));
4161 break;
4162 case 0x06f: /* VIS I fnands */
4163 CHECK_FPU_FEATURE(dc, VIS1);
4164 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4165 gen_update_fprs_dirty(rd);
4166 break;
4167 case 0x070: /* VIS I fand */
4168 CHECK_FPU_FEATURE(dc, VIS1);
4169 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4170 cpu_fpr[DFPREG(rs2)]);
4171 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4172 cpu_fpr[DFPREG(rs1) + 1],
4173 cpu_fpr[DFPREG(rs2) + 1]);
4174 gen_update_fprs_dirty(DFPREG(rd));
4175 break;
4176 case 0x071: /* VIS I fands */
4177 CHECK_FPU_FEATURE(dc, VIS1);
4178 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4179 gen_update_fprs_dirty(rd);
4180 break;
4181 case 0x072: /* VIS I fxnor */
4182 CHECK_FPU_FEATURE(dc, VIS1);
4183 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4184 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4185 cpu_fpr[DFPREG(rs1)]);
4186 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4187 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4188 cpu_fpr[DFPREG(rs1) + 1]);
4189 gen_update_fprs_dirty(DFPREG(rd));
4190 break;
4191 case 0x073: /* VIS I fxnors */
4192 CHECK_FPU_FEATURE(dc, VIS1);
4193 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4194 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4195 gen_update_fprs_dirty(rd);
4196 break;
4197 case 0x074: /* VIS I fsrc1 */
4198 CHECK_FPU_FEATURE(dc, VIS1);
4199 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4200 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4201 cpu_fpr[DFPREG(rs1) + 1]);
4202 gen_update_fprs_dirty(DFPREG(rd));
4203 break;
4204 case 0x075: /* VIS I fsrc1s */
4205 CHECK_FPU_FEATURE(dc, VIS1);
4206 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4207 gen_update_fprs_dirty(rd);
4208 break;
4209 case 0x076: /* VIS I fornot2 */
4210 CHECK_FPU_FEATURE(dc, VIS1);
4211 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4212 cpu_fpr[DFPREG(rs2)]);
4213 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4214 cpu_fpr[DFPREG(rs1) + 1],
4215 cpu_fpr[DFPREG(rs2) + 1]);
4216 gen_update_fprs_dirty(DFPREG(rd));
4217 break;
4218 case 0x077: /* VIS I fornot2s */
4219 CHECK_FPU_FEATURE(dc, VIS1);
4220 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4221 gen_update_fprs_dirty(rd);
4222 break;
4223 case 0x078: /* VIS I fsrc2 */
4224 CHECK_FPU_FEATURE(dc, VIS1);
4225 gen_op_load_fpr_DT0(DFPREG(rs2));
4226 gen_op_store_DT0_fpr(DFPREG(rd));
4227 gen_update_fprs_dirty(DFPREG(rd));
4228 break;
4229 case 0x079: /* VIS I fsrc2s */
4230 CHECK_FPU_FEATURE(dc, VIS1);
4231 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4232 gen_update_fprs_dirty(rd);
4233 break;
4234 case 0x07a: /* VIS I fornot1 */
4235 CHECK_FPU_FEATURE(dc, VIS1);
4236 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4237 cpu_fpr[DFPREG(rs1)]);
4238 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4239 cpu_fpr[DFPREG(rs2) + 1],
4240 cpu_fpr[DFPREG(rs1) + 1]);
4241 gen_update_fprs_dirty(DFPREG(rd));
4242 break;
4243 case 0x07b: /* VIS I fornot1s */
4244 CHECK_FPU_FEATURE(dc, VIS1);
4245 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4246 gen_update_fprs_dirty(rd);
4247 break;
4248 case 0x07c: /* VIS I for */
4249 CHECK_FPU_FEATURE(dc, VIS1);
4250 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4251 cpu_fpr[DFPREG(rs2)]);
4252 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4253 cpu_fpr[DFPREG(rs1) + 1],
4254 cpu_fpr[DFPREG(rs2) + 1]);
4255 gen_update_fprs_dirty(DFPREG(rd));
4256 break;
4257 case 0x07d: /* VIS I fors */
4258 CHECK_FPU_FEATURE(dc, VIS1);
4259 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4260 gen_update_fprs_dirty(rd);
4261 break;
4262 case 0x07e: /* VIS I fone */
4263 CHECK_FPU_FEATURE(dc, VIS1);
4264 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4265 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4266 gen_update_fprs_dirty(DFPREG(rd));
4267 break;
4268 case 0x07f: /* VIS I fones */
4269 CHECK_FPU_FEATURE(dc, VIS1);
4270 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4271 gen_update_fprs_dirty(rd);
4272 break;
4273 case 0x080: /* VIS I shutdown */
4274 case 0x081: /* VIS II siam */
4275 // XXX
4276 goto illegal_insn;
4277 default:
4278 goto illegal_insn;
4280 #else
4281 goto ncp_insn;
4282 #endif
4283 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4284 #ifdef TARGET_SPARC64
4285 goto illegal_insn;
4286 #else
4287 goto ncp_insn;
4288 #endif
4289 #ifdef TARGET_SPARC64
4290 } else if (xop == 0x39) { /* V9 return */
4291 TCGv_i32 r_const;
4293 save_state(dc, cpu_cond);
4294 cpu_src1 = get_src1(insn, cpu_src1);
4295 if (IS_IMM) { /* immediate */
4296 simm = GET_FIELDs(insn, 19, 31);
4297 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4298 } else { /* register */
4299 rs2 = GET_FIELD(insn, 27, 31);
4300 if (rs2) {
4301 gen_movl_reg_TN(rs2, cpu_src2);
4302 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4303 } else
4304 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4306 gen_helper_restore();
4307 gen_mov_pc_npc(dc, cpu_cond);
4308 r_const = tcg_const_i32(3);
4309 gen_helper_check_align(cpu_dst, r_const);
4310 tcg_temp_free_i32(r_const);
4311 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4312 dc->npc = DYNAMIC_PC;
4313 goto jmp_insn;
4314 #endif
4315 } else {
4316 cpu_src1 = get_src1(insn, cpu_src1);
4317 if (IS_IMM) { /* immediate */
4318 simm = GET_FIELDs(insn, 19, 31);
4319 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4320 } else { /* register */
4321 rs2 = GET_FIELD(insn, 27, 31);
4322 if (rs2) {
4323 gen_movl_reg_TN(rs2, cpu_src2);
4324 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4325 } else
4326 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4328 switch (xop) {
4329 case 0x38: /* jmpl */
4331 TCGv r_pc;
4332 TCGv_i32 r_const;
4334 r_pc = tcg_const_tl(dc->pc);
4335 gen_movl_TN_reg(rd, r_pc);
4336 tcg_temp_free(r_pc);
4337 gen_mov_pc_npc(dc, cpu_cond);
4338 r_const = tcg_const_i32(3);
4339 gen_helper_check_align(cpu_dst, r_const);
4340 tcg_temp_free_i32(r_const);
4341 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4342 dc->npc = DYNAMIC_PC;
4344 goto jmp_insn;
4345 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4346 case 0x39: /* rett, V9 return */
4348 TCGv_i32 r_const;
4350 if (!supervisor(dc))
4351 goto priv_insn;
4352 gen_mov_pc_npc(dc, cpu_cond);
4353 r_const = tcg_const_i32(3);
4354 gen_helper_check_align(cpu_dst, r_const);
4355 tcg_temp_free_i32(r_const);
4356 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4357 dc->npc = DYNAMIC_PC;
4358 gen_helper_rett();
4360 goto jmp_insn;
4361 #endif
4362 case 0x3b: /* flush */
4363 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4364 goto unimp_flush;
4365 /* nop */
4366 break;
4367 case 0x3c: /* save */
4368 save_state(dc, cpu_cond);
4369 gen_helper_save();
4370 gen_movl_TN_reg(rd, cpu_dst);
4371 break;
4372 case 0x3d: /* restore */
4373 save_state(dc, cpu_cond);
4374 gen_helper_restore();
4375 gen_movl_TN_reg(rd, cpu_dst);
4376 break;
4377 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4378 case 0x3e: /* V9 done/retry */
4380 switch (rd) {
4381 case 0:
4382 if (!supervisor(dc))
4383 goto priv_insn;
4384 dc->npc = DYNAMIC_PC;
4385 dc->pc = DYNAMIC_PC;
4386 gen_helper_done();
4387 goto jmp_insn;
4388 case 1:
4389 if (!supervisor(dc))
4390 goto priv_insn;
4391 dc->npc = DYNAMIC_PC;
4392 dc->pc = DYNAMIC_PC;
4393 gen_helper_retry();
4394 goto jmp_insn;
4395 default:
4396 goto illegal_insn;
4399 break;
4400 #endif
4401 default:
4402 goto illegal_insn;
4405 break;
4407 break;
4408 case 3: /* load/store instructions */
4410 unsigned int xop = GET_FIELD(insn, 7, 12);
4412 /* flush pending conditional evaluations before exposing
4413 cpu state */
4414 if (dc->cc_op != CC_OP_FLAGS) {
4415 dc->cc_op = CC_OP_FLAGS;
4416 gen_helper_compute_psr();
4418 cpu_src1 = get_src1(insn, cpu_src1);
4419 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4420 rs2 = GET_FIELD(insn, 27, 31);
4421 gen_movl_reg_TN(rs2, cpu_src2);
4422 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4423 } else if (IS_IMM) { /* immediate */
4424 simm = GET_FIELDs(insn, 19, 31);
4425 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4426 } else { /* register */
4427 rs2 = GET_FIELD(insn, 27, 31);
4428 if (rs2 != 0) {
4429 gen_movl_reg_TN(rs2, cpu_src2);
4430 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4431 } else
4432 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4434 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4435 (xop > 0x17 && xop <= 0x1d ) ||
4436 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4437 switch (xop) {
4438 case 0x0: /* ld, V9 lduw, load unsigned word */
4439 gen_address_mask(dc, cpu_addr);
4440 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4441 break;
4442 case 0x1: /* ldub, load unsigned byte */
4443 gen_address_mask(dc, cpu_addr);
4444 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4445 break;
4446 case 0x2: /* lduh, load unsigned halfword */
4447 gen_address_mask(dc, cpu_addr);
4448 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4449 break;
4450 case 0x3: /* ldd, load double word */
4451 if (rd & 1)
4452 goto illegal_insn;
4453 else {
4454 TCGv_i32 r_const;
4456 save_state(dc, cpu_cond);
4457 r_const = tcg_const_i32(7);
4458 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4459 tcg_temp_free_i32(r_const);
4460 gen_address_mask(dc, cpu_addr);
4461 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4462 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4463 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4464 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4465 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4466 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4467 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4469 break;
4470 case 0x9: /* ldsb, load signed byte */
4471 gen_address_mask(dc, cpu_addr);
4472 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4473 break;
4474 case 0xa: /* ldsh, load signed halfword */
4475 gen_address_mask(dc, cpu_addr);
4476 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4477 break;
4478                 case 0xd:       /* ldstub -- XXX: should be performed atomically */
4480 TCGv r_const;
4482 gen_address_mask(dc, cpu_addr);
4483 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4484 r_const = tcg_const_tl(0xff);
4485 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4486 tcg_temp_free(r_const);
4488 break;
4489                 case 0x0f:      /* swap: swap register with memory.
4490                                    XXX: should be performed atomically */
4491 CHECK_IU_FEATURE(dc, SWAP);
4492 gen_movl_reg_TN(rd, cpu_val);
4493 gen_address_mask(dc, cpu_addr);
4494 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4495 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4496 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4497 break;
4498 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4499 case 0x10: /* lda, V9 lduwa, load word alternate */
4500 #ifndef TARGET_SPARC64
4501 if (IS_IMM)
4502 goto illegal_insn;
4503 if (!supervisor(dc))
4504 goto priv_insn;
4505 #endif
4506 save_state(dc, cpu_cond);
4507 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4508 break;
4509 case 0x11: /* lduba, load unsigned byte alternate */
4510 #ifndef TARGET_SPARC64
4511 if (IS_IMM)
4512 goto illegal_insn;
4513 if (!supervisor(dc))
4514 goto priv_insn;
4515 #endif
4516 save_state(dc, cpu_cond);
4517 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4518 break;
4519 case 0x12: /* lduha, load unsigned halfword alternate */
4520 #ifndef TARGET_SPARC64
4521 if (IS_IMM)
4522 goto illegal_insn;
4523 if (!supervisor(dc))
4524 goto priv_insn;
4525 #endif
4526 save_state(dc, cpu_cond);
4527 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4528 break;
4529 case 0x13: /* ldda, load double word alternate */
4530 #ifndef TARGET_SPARC64
4531 if (IS_IMM)
4532 goto illegal_insn;
4533 if (!supervisor(dc))
4534 goto priv_insn;
4535 #endif
4536 if (rd & 1)
4537 goto illegal_insn;
4538 save_state(dc, cpu_cond);
4539 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4540 goto skip_move;
4541 case 0x19: /* ldsba, load signed byte alternate */
4542 #ifndef TARGET_SPARC64
4543 if (IS_IMM)
4544 goto illegal_insn;
4545 if (!supervisor(dc))
4546 goto priv_insn;
4547 #endif
4548 save_state(dc, cpu_cond);
4549 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4550 break;
4551 case 0x1a: /* ldsha, load signed halfword alternate */
4552 #ifndef TARGET_SPARC64
4553 if (IS_IMM)
4554 goto illegal_insn;
4555 if (!supervisor(dc))
4556 goto priv_insn;
4557 #endif
4558 save_state(dc, cpu_cond);
4559 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4560 break;
4561 case 0x1d: /* ldstuba -- XXX: should be atomically */
4562 #ifndef TARGET_SPARC64
4563 if (IS_IMM)
4564 goto illegal_insn;
4565 if (!supervisor(dc))
4566 goto priv_insn;
4567 #endif
4568 save_state(dc, cpu_cond);
4569 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4570 break;
4571                 case 0x1f:      /* swapa: swap reg with alt. memory.
4572                                    XXX: should be performed atomically */
4573 CHECK_IU_FEATURE(dc, SWAP);
4574 #ifndef TARGET_SPARC64
4575 if (IS_IMM)
4576 goto illegal_insn;
4577 if (!supervisor(dc))
4578 goto priv_insn;
4579 #endif
4580 save_state(dc, cpu_cond);
4581 gen_movl_reg_TN(rd, cpu_val);
4582 gen_swap_asi(cpu_val, cpu_addr, insn);
4583 break;
4585 #ifndef TARGET_SPARC64
4586 case 0x30: /* ldc */
4587 case 0x31: /* ldcsr */
4588 case 0x33: /* lddc */
4589 goto ncp_insn;
4590 #endif
4591 #endif
4592 #ifdef TARGET_SPARC64
4593 case 0x08: /* V9 ldsw */
4594 gen_address_mask(dc, cpu_addr);
4595 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4596 break;
4597 case 0x0b: /* V9 ldx */
4598 gen_address_mask(dc, cpu_addr);
4599 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4600 break;
4601 case 0x18: /* V9 ldswa */
4602 save_state(dc, cpu_cond);
4603 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4604 break;
4605 case 0x1b: /* V9 ldxa */
4606 save_state(dc, cpu_cond);
4607 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4608 break;
4609 case 0x2d: /* V9 prefetch, no effect */
4610 goto skip_move;
4611 case 0x30: /* V9 ldfa */
4612 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4613 goto jmp_insn;
4615 save_state(dc, cpu_cond);
4616 gen_ldf_asi(cpu_addr, insn, 4, rd);
4617 gen_update_fprs_dirty(rd);
4618 goto skip_move;
4619 case 0x33: /* V9 lddfa */
4620 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4621 goto jmp_insn;
4623 save_state(dc, cpu_cond);
4624 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4625 gen_update_fprs_dirty(DFPREG(rd));
4626 goto skip_move;
4627 case 0x3d: /* V9 prefetcha, no effect */
4628 goto skip_move;
4629 case 0x32: /* V9 ldqfa */
4630 CHECK_FPU_FEATURE(dc, FLOAT128);
4631 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4632 goto jmp_insn;
4634 save_state(dc, cpu_cond);
4635 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4636 gen_update_fprs_dirty(QFPREG(rd));
4637 goto skip_move;
4638 #endif
4639 default:
4640 goto illegal_insn;
4642 gen_movl_TN_reg(rd, cpu_val);
4643 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4644 skip_move: ;
4645 #endif
4646 } else if (xop >= 0x20 && xop < 0x24) {
4647 if (gen_trap_ifnofpu(dc, cpu_cond))
4648 goto jmp_insn;
4649 save_state(dc, cpu_cond);
4650 switch (xop) {
4651 case 0x20: /* ldf, load fpreg */
4652 gen_address_mask(dc, cpu_addr);
4653 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4654 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4655 gen_update_fprs_dirty(rd);
4656 break;
4657 case 0x21: /* ldfsr, V9 ldxfsr */
4658 #ifdef TARGET_SPARC64
4659 gen_address_mask(dc, cpu_addr);
4660 if (rd == 1) {
4661 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4662 gen_helper_ldxfsr(cpu_tmp64);
4663 } else {
4664 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4665 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4666 gen_helper_ldfsr(cpu_tmp32);
4668 #else
4670 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4671 gen_helper_ldfsr(cpu_tmp32);
4673 #endif
4674 break;
4675 case 0x22: /* ldqf, load quad fpreg */
4677 TCGv_i32 r_const;
4679 CHECK_FPU_FEATURE(dc, FLOAT128);
4680 r_const = tcg_const_i32(dc->mem_idx);
4681 gen_address_mask(dc, cpu_addr);
4682 gen_helper_ldqf(cpu_addr, r_const);
4683 tcg_temp_free_i32(r_const);
4684 gen_op_store_QT0_fpr(QFPREG(rd));
4685 gen_update_fprs_dirty(QFPREG(rd));
4687 break;
4688 case 0x23: /* lddf, load double fpreg */
4690 TCGv_i32 r_const;
4692 r_const = tcg_const_i32(dc->mem_idx);
4693 gen_address_mask(dc, cpu_addr);
4694 gen_helper_lddf(cpu_addr, r_const);
4695 tcg_temp_free_i32(r_const);
4696 gen_op_store_DT0_fpr(DFPREG(rd));
4697 gen_update_fprs_dirty(DFPREG(rd));
4699 break;
4700 default:
4701 goto illegal_insn;
4703 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4704 xop == 0xe || xop == 0x1e) {
4705 gen_movl_reg_TN(rd, cpu_val);
4706 switch (xop) {
4707 case 0x4: /* st, store word */
4708 gen_address_mask(dc, cpu_addr);
4709 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4710 break;
4711 case 0x5: /* stb, store byte */
4712 gen_address_mask(dc, cpu_addr);
4713 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4714 break;
4715 case 0x6: /* sth, store halfword */
4716 gen_address_mask(dc, cpu_addr);
4717 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4718 break;
4719 case 0x7: /* std, store double word */
4720 if (rd & 1)
4721 goto illegal_insn;
4722 else {
4723 TCGv_i32 r_const;
4725 save_state(dc, cpu_cond);
4726 gen_address_mask(dc, cpu_addr);
4727 r_const = tcg_const_i32(7);
4728 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4729 tcg_temp_free_i32(r_const);
4730 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4731 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4732 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4734 break;
4735 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4736 case 0x14: /* sta, V9 stwa, store word alternate */
4737 #ifndef TARGET_SPARC64
4738 if (IS_IMM)
4739 goto illegal_insn;
4740 if (!supervisor(dc))
4741 goto priv_insn;
4742 #endif
4743 save_state(dc, cpu_cond);
4744 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4745 dc->npc = DYNAMIC_PC;
4746 break;
4747 case 0x15: /* stba, store byte alternate */
4748 #ifndef TARGET_SPARC64
4749 if (IS_IMM)
4750 goto illegal_insn;
4751 if (!supervisor(dc))
4752 goto priv_insn;
4753 #endif
4754 save_state(dc, cpu_cond);
4755 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4756 dc->npc = DYNAMIC_PC;
4757 break;
4758 case 0x16: /* stha, store halfword alternate */
4759 #ifndef TARGET_SPARC64
4760 if (IS_IMM)
4761 goto illegal_insn;
4762 if (!supervisor(dc))
4763 goto priv_insn;
4764 #endif
4765 save_state(dc, cpu_cond);
4766 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4767 dc->npc = DYNAMIC_PC;
4768 break;
4769 case 0x17: /* stda, store double word alternate */
4770 #ifndef TARGET_SPARC64
4771 if (IS_IMM)
4772 goto illegal_insn;
4773 if (!supervisor(dc))
4774 goto priv_insn;
4775 #endif
4776 if (rd & 1)
4777 goto illegal_insn;
4778 else {
4779 save_state(dc, cpu_cond);
4780 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4782 break;
4783 #endif
4784 #ifdef TARGET_SPARC64
4785 case 0x0e: /* V9 stx */
4786 gen_address_mask(dc, cpu_addr);
4787 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4788 break;
4789 case 0x1e: /* V9 stxa */
4790 save_state(dc, cpu_cond);
4791 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4792 dc->npc = DYNAMIC_PC;
4793 break;
4794 #endif
4795 default:
4796 goto illegal_insn;
4798 } else if (xop > 0x23 && xop < 0x28) {
4799 if (gen_trap_ifnofpu(dc, cpu_cond))
4800 goto jmp_insn;
4801 save_state(dc, cpu_cond);
4802 switch (xop) {
4803 case 0x24: /* stf, store fpreg */
4804 gen_address_mask(dc, cpu_addr);
4805 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4806 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4807 break;
4808 case 0x25: /* stfsr, V9 stxfsr */
4809 #ifdef TARGET_SPARC64
4810 gen_address_mask(dc, cpu_addr);
4811 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4812 if (rd == 1)
4813 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4814 else
4815 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4816 #else
4817 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4818 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4819 #endif
4820 break;
4821 case 0x26:
4822 #ifdef TARGET_SPARC64
4823 /* V9 stqf, store quad fpreg */
4825 TCGv_i32 r_const;
4827 CHECK_FPU_FEATURE(dc, FLOAT128);
4828 gen_op_load_fpr_QT0(QFPREG(rd));
4829 r_const = tcg_const_i32(dc->mem_idx);
4830 gen_address_mask(dc, cpu_addr);
4831 gen_helper_stqf(cpu_addr, r_const);
4832 tcg_temp_free_i32(r_const);
4834 break;
4835 #else /* !TARGET_SPARC64 */
4836 /* stdfq, store floating point queue */
4837 #if defined(CONFIG_USER_ONLY)
4838 goto illegal_insn;
4839 #else
4840 if (!supervisor(dc))
4841 goto priv_insn;
4842 if (gen_trap_ifnofpu(dc, cpu_cond))
4843 goto jmp_insn;
4844 goto nfq_insn;
4845 #endif
4846 #endif
4847 case 0x27: /* stdf, store double fpreg */
4849 TCGv_i32 r_const;
4851 gen_op_load_fpr_DT0(DFPREG(rd));
4852 r_const = tcg_const_i32(dc->mem_idx);
4853 gen_address_mask(dc, cpu_addr);
4854 gen_helper_stdf(cpu_addr, r_const);
4855 tcg_temp_free_i32(r_const);
4857 break;
4858 default:
4859 goto illegal_insn;
4861 } else if (xop > 0x33 && xop < 0x3f) {
4862 save_state(dc, cpu_cond);
4863 switch (xop) {
4864 #ifdef TARGET_SPARC64
4865 case 0x34: /* V9 stfa */
4866 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4867 goto jmp_insn;
4869 gen_stf_asi(cpu_addr, insn, 4, rd);
4870 break;
4871 case 0x36: /* V9 stqfa */
4873 TCGv_i32 r_const;
4875 CHECK_FPU_FEATURE(dc, FLOAT128);
4876 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4877 goto jmp_insn;
4879 r_const = tcg_const_i32(7);
4880 gen_helper_check_align(cpu_addr, r_const);
4881 tcg_temp_free_i32(r_const);
4882 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4884 break;
4885 case 0x37: /* V9 stdfa */
4886 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4887 goto jmp_insn;
4889 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4890 break;
4891 case 0x3c: /* V9 casa */
4892 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4893 gen_movl_TN_reg(rd, cpu_val);
4894 break;
4895 case 0x3e: /* V9 casxa */
4896 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4897 gen_movl_TN_reg(rd, cpu_val);
4898 break;
4899 #else
4900 case 0x34: /* stc */
4901 case 0x35: /* stcsr */
4902 case 0x36: /* stdcq */
4903 case 0x37: /* stdc */
4904 goto ncp_insn;
4905 #endif
4906 default:
4907 goto illegal_insn;
4909 } else
4910 goto illegal_insn;
4912 break;
4914 /* default case for non jump instructions */
4915 if (dc->npc == DYNAMIC_PC) {
4916 dc->pc = DYNAMIC_PC;
4917 gen_op_next_insn();
4918 } else if (dc->npc == JUMP_PC) {
4919 /* we can do a static jump */
4920 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4921 dc->is_br = 1;
4922 } else {
4923 dc->pc = dc->npc;
4924 dc->npc = dc->npc + 4;
4926 jmp_insn:
4927 goto egress;
4928 illegal_insn:
4930 TCGv_i32 r_const;
4932 save_state(dc, cpu_cond);
4933 r_const = tcg_const_i32(TT_ILL_INSN);
4934 gen_helper_raise_exception(r_const);
4935 tcg_temp_free_i32(r_const);
4936 dc->is_br = 1;
4938 goto egress;
4939 unimp_flush:
4941 TCGv_i32 r_const;
4943 save_state(dc, cpu_cond);
4944 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4945 gen_helper_raise_exception(r_const);
4946 tcg_temp_free_i32(r_const);
4947 dc->is_br = 1;
4949 goto egress;
4950 #if !defined(CONFIG_USER_ONLY)
4951 priv_insn:
4953 TCGv_i32 r_const;
4955 save_state(dc, cpu_cond);
4956 r_const = tcg_const_i32(TT_PRIV_INSN);
4957 gen_helper_raise_exception(r_const);
4958 tcg_temp_free_i32(r_const);
4959 dc->is_br = 1;
4961 goto egress;
4962 #endif
4963 nfpu_insn:
4964 save_state(dc, cpu_cond);
4965 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4966 dc->is_br = 1;
4967 goto egress;
4968 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4969 nfq_insn:
4970 save_state(dc, cpu_cond);
4971 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4972 dc->is_br = 1;
4973 goto egress;
4974 #endif
4975 #ifndef TARGET_SPARC64
4976 ncp_insn:
4978 TCGv r_const;
4980 save_state(dc, cpu_cond);
4981 r_const = tcg_const_i32(TT_NCP_INSN);
4982 gen_helper_raise_exception(r_const);
4983 tcg_temp_free(r_const);
4984 dc->is_br = 1;
4986 goto egress;
4987 #endif
4988 egress:
4989 tcg_temp_free(cpu_tmp1);
4990 tcg_temp_free(cpu_tmp2);
4993 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4994 int spc, CPUSPARCState *env)
4996 target_ulong pc_start, last_pc;
4997 uint16_t *gen_opc_end;
4998 DisasContext dc1, *dc = &dc1;
4999 CPUBreakpoint *bp;
5000 int j, lj = -1;
5001 int num_insns;
5002 int max_insns;
5004 memset(dc, 0, sizeof(DisasContext));
5005 dc->tb = tb;
5006 pc_start = tb->pc;
5007 dc->pc = pc_start;
5008 last_pc = dc->pc;
5009 dc->npc = (target_ulong) tb->cs_base;
5010 dc->cc_op = CC_OP_DYNAMIC;
5011 dc->mem_idx = cpu_mmu_index(env);
5012 dc->def = env->def;
5013 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5014 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5015 dc->singlestep = (env->singlestep_enabled || singlestep);
5016 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5018 cpu_tmp0 = tcg_temp_new();
5019 cpu_tmp32 = tcg_temp_new_i32();
5020 cpu_tmp64 = tcg_temp_new_i64();
5022 cpu_dst = tcg_temp_local_new();
5024 // loads and stores
5025 cpu_val = tcg_temp_local_new();
5026 cpu_addr = tcg_temp_local_new();
5028 num_insns = 0;
5029 max_insns = tb->cflags & CF_COUNT_MASK;
5030 if (max_insns == 0)
5031 max_insns = CF_COUNT_MASK;
5032 gen_icount_start();
5033 do {
5034 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5035 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5036 if (bp->pc == dc->pc) {
5037 if (dc->pc != pc_start)
5038 save_state(dc, cpu_cond);
5039 gen_helper_debug();
5040 tcg_gen_exit_tb(0);
5041 dc->is_br = 1;
5042 goto exit_gen_loop;
5046 if (spc) {
5047 qemu_log("Search PC...\n");
5048 j = gen_opc_ptr - gen_opc_buf;
5049 if (lj < j) {
5050 lj++;
5051 while (lj < j)
5052 gen_opc_instr_start[lj++] = 0;
5053 gen_opc_pc[lj] = dc->pc;
5054 gen_opc_npc[lj] = dc->npc;
5055 gen_opc_instr_start[lj] = 1;
5056 gen_opc_icount[lj] = num_insns;
5059 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5060 gen_io_start();
5061 last_pc = dc->pc;
5062 disas_sparc_insn(dc);
5063 num_insns++;
5065 if (dc->is_br)
5066 break;
5067 /* if the next PC is different, we abort now */
5068 if (dc->pc != (last_pc + 4))
5069 break;
5070 /* if we reach a page boundary, we stop generation so that the
5071 PC of a TT_TFAULT exception is always in the right page */
5072 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5073 break;
5074 /* if single step mode, we generate only one instruction and
5075 generate an exception */
5076 if (dc->singlestep) {
5077 break;
5079 } while ((gen_opc_ptr < gen_opc_end) &&
5080 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5081 num_insns < max_insns);
5083 exit_gen_loop:
5084 tcg_temp_free(cpu_addr);
5085 tcg_temp_free(cpu_val);
5086 tcg_temp_free(cpu_dst);
5087 tcg_temp_free_i64(cpu_tmp64);
5088 tcg_temp_free_i32(cpu_tmp32);
5089 tcg_temp_free(cpu_tmp0);
5090 if (tb->cflags & CF_LAST_IO)
5091 gen_io_end();
5092 if (!dc->is_br) {
5093 if (dc->pc != DYNAMIC_PC &&
5094 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5095 /* static PC and NPC: we can use direct chaining */
5096 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5097 } else {
5098 if (dc->pc != DYNAMIC_PC)
5099 tcg_gen_movi_tl(cpu_pc, dc->pc);
5100 save_npc(dc, cpu_cond);
5101 tcg_gen_exit_tb(0);
5104 gen_icount_end(tb, num_insns);
5105 *gen_opc_ptr = INDEX_op_end;
5106 if (spc) {
5107 j = gen_opc_ptr - gen_opc_buf;
5108 lj++;
5109 while (lj <= j)
5110 gen_opc_instr_start[lj++] = 0;
5111 #if 0
5112 log_page_dump();
5113 #endif
5114 gen_opc_jump_pc[0] = dc->jump_pc[0];
5115 gen_opc_jump_pc[1] = dc->jump_pc[1];
5116 } else {
5117 tb->size = last_pc + 4 - pc_start;
5118 tb->icount = num_insns;
5120 #ifdef DEBUG_DISAS
5121 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5122 qemu_log("--------------\n");
5123 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5124 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5125 qemu_log("\n");
5127 #endif
5130 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5132 gen_intermediate_code_internal(tb, 0, env);
5135 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5137 gen_intermediate_code_internal(tb, 1, env);
5140 void gen_intermediate_code_init(CPUSPARCState *env)
5142 unsigned int i;
5143 static int inited;
5144 static const char * const gregnames[8] = {
5145 NULL, // g0 not used
5146 "g1",
5147 "g2",
5148 "g3",
5149 "g4",
5150 "g5",
5151 "g6",
5152 "g7",
5154 static const char * const fregnames[64] = {
5155 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5156 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5157 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5158 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5159 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5160 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5161 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5162 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5165 /* init various static tables */
5166 if (!inited) {
5167 inited = 1;
5169 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5170 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5171 offsetof(CPUState, regwptr),
5172 "regwptr");
5173 #ifdef TARGET_SPARC64
5174 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5175 "xcc");
5176 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5177 "asi");
5178 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5179 "fprs");
5180 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5181 "gsr");
5182 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5183 offsetof(CPUState, tick_cmpr),
5184 "tick_cmpr");
5185 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5186 offsetof(CPUState, stick_cmpr),
5187 "stick_cmpr");
5188 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5189 offsetof(CPUState, hstick_cmpr),
5190 "hstick_cmpr");
5191 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5192 "hintp");
5193 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5194 "htba");
5195 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5196 "hver");
5197 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5198 offsetof(CPUState, ssr), "ssr");
5199 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5200 offsetof(CPUState, version), "ver");
5201 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5202 offsetof(CPUState, softint),
5203 "softint");
5204 #else
5205 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5206 "wim");
5207 #endif
5208 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5209 "cond");
5210 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5211 "cc_src");
5212 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5213 offsetof(CPUState, cc_src2),
5214 "cc_src2");
5215 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5216 "cc_dst");
5217 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5218 "cc_op");
5219 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5220 "psr");
5221 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5222 "fsr");
5223 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5224 "pc");
5225 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5226 "npc");
5227 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5228 #ifndef CONFIG_USER_ONLY
5229 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5230 "tbr");
5231 #endif
5232 for (i = 1; i < 8; i++)
5233 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5234 offsetof(CPUState, gregs[i]),
5235 gregnames[i]);
5236 for (i = 0; i < TARGET_FPREGS; i++)
5237 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5238 offsetof(CPUState, fpr[i]),
5239 fregnames[i]);
5241 /* register helpers */
5243 #define GEN_HELPER 2
5244 #include "helper.h"
5248 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5250 target_ulong npc;
5251 env->pc = gen_opc_pc[pc_pos];
5252 npc = gen_opc_npc[pc_pos];
5253 if (npc == 1) {
5254 /* dynamic NPC: already stored */
5255 } else if (npc == 2) {
5256 /* jump PC: use 'cond' and the jump targets of the translation */
5257 if (env->cond) {
5258 env->npc = gen_opc_jump_pc[0];
5259 } else {
5260 env->npc = gen_opc_jump_pc[1];
5262 } else {
5263 env->npc = npc;
5266 /* flush pending conditional evaluations before exposing cpu state */
5267 if (CC_OP != CC_OP_FLAGS) {
5268 helper_compute_psr();