/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This macro uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
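
/* Worked example: GET_FIELD counts bits from the most significant end, so
   GET_FIELD(insn, 0, 1) yields the top two bits, (insn >> 30) & 3 -- the
   "op" field used as `opc` below.  GET_FIELD_SP uses manual bit order, so
   GET_FIELD_SP(insn, 0, 18) yields the low 19 bits, insn & 0x7ffff. */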
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
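
/* Note: on SPARC64 the low bit of the 5-bit register field selects the
   upper half of the double/quad register file, acting as bit 5 of the
   register number: DFPREG(1) = ((1 & 1) << 5) | (1 & 0x1e) = 32 (%f32),
   while DFPREG(2) = 2 (%f2).  On 32-bit SPARC the odd bit is simply
   masked off. */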
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
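
/* Worked example: GET_FIELDs(insn, 19, 31) extracts a 13-bit field and
   passes it through sign_extend(x, 13).  For x = 0x1000 (bit 12 set),
   len becomes 19 and (0x1000 << 19) >> 19 arithmetic-shifts the sign bit
   back down, giving -4096 -- the usual simm13 interpretation. */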
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
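
/* Note: tcg_gen_exit_tb((tcg_target_long)tb + tb_num) returns the TB
   pointer with the jump slot index encoded in its low bits, which lets
   the execution loop patch the goto_tb branch and chain the two TBs
   directly.  Chaining is only safe while pc and npc stay on the TB's own
   page, hence the TARGET_PAGE_MASK checks above. */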
// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
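
/* Note: the bit-twiddling above is the classic signed-overflow test for
   an addition: overflow occurred iff both operands have the same sign but
   the result's sign differs, i.e. (~(src1 ^ src2) & (src1 ^ dst)) has bit
   31 set; in that case a tag-overflow trap (TT_TOVF) is raised. */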
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
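
/* Note: for a previous addition dst = src1 + src2, the carry-out can be
   recovered without host flags by an unsigned compare: the 32-bit add
   wrapped around exactly when (uint32_t)dst < (uint32_t)src1, hence the
   TCG_COND_LTU setcond on the truncated cc_dst/cc_src pair above. */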
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the subtraction that generated the carry in the
               first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
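
/* Note: MULScc implements one step of the SPARC V8 multiply-step
   algorithm: the addend is zeroed unless Y's LSB is set, Y shifts right
   taking in the accumulator's old LSB, and the accumulator shifts right
   with N ^ V (the corrected sign of the partial product) inserted at bit
   31.  Iterated 32 times by software, this yields a 32x32 multiply with
   the low half accumulating in Y. */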
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2,
                                   int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    /* INT64_MIN / -1 would overflow; clamp the quotient to INT64_MIN */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
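
/* Note: SPARC exposes an architectural pc/npc pair because every branch
   has a delay slot; retiring an instruction means pc = npc, npc += 4,
   which is exactly what gen_op_next_insn emits.  The DYNAMIC_PC/JUMP_PC
   markers track whether npc is still known at translation time. */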
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }
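
    /* Note: the offsets are relative to FSR_FCC0_SHIFT (fcc0 lives at FSR
       bits 11:10).  Per the SPARC V9 FSR layout, fcc1/fcc2/fcc3 sit at
       bits 33:32, 35:34 and 37:36, hence 32 - 10, 34 - 10 and 36 - 10;
       the gen_op_eval_fb* helpers add this offset to the base shift. */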
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
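
/* Note on the annul bit handled above: for a conditional branch, a = 1
   annuls the delay-slot instruction only when the branch is not taken;
   for "branch never" (cond 0x0) with a = 1 the delay slot is skipped
   outright, and for "branch always" (cond 0x8) with a = 1 control
   transfers straight to the target with the delay slot annulled. */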
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
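
/* Note: for V9 alternate-space accesses, an i bit of 1 (IS_IMM) means
   the ASI comes from the %asi register with a register+simm13 address,
   while i = 0 takes an 8-bit immediate ASI from instruction bits 12..5
   (GET_FIELD(insn, 19, 26)). */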
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0) {
        tcg_gen_movi_tl(def, 0);
    } else if (rs1 < 8) {
        r_rs1 = cpu_gregs[rs1];
    } else {
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    }
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0) {
            tcg_gen_movi_tl(def, 0);
        } else if (rs2 < 8) {
            r_rs2 = cpu_gregs[rs2];
        } else {
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
        }
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
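        /* Note: CALL writes its own address (dc->pc) into r15 (%o7) and
           transfers control pc-relative by a 30-bit word displacement
           (disp30 << 2), with the delay slot handled through the
           gen_mov_pc_npc/npc assignment above. */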
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }

                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) { /* Trap Always */
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);

                    if (rs2 == 0 &&
                        dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {

                        gen_helper_shutdown();

                    } else {
                        gen_helper_raise_exception(cpu_tmp32);
                    }
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv r_const;

                        /* Read Asr17 for a Leon3 monoprocessor */
                        r_const = tcg_const_tl((1 << 8)
                                               | (dc->def->nwindows - 1));
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        break;
                    }
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
2194 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2195 if (!supervisor(dc))
2196 goto priv_insn;
2197 #ifdef TARGET_SPARC64
2198 rs1 = GET_FIELD(insn, 13, 17);
2199 switch (rs1) {
2200 case 0: // tpc
2202 TCGv_ptr r_tsptr;
2204 r_tsptr = tcg_temp_new_ptr();
2205 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2206 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2207 offsetof(trap_state, tpc));
2208 tcg_temp_free_ptr(r_tsptr);
2210 break;
2211 case 1: // tnpc
2213 TCGv_ptr r_tsptr;
2215 r_tsptr = tcg_temp_new_ptr();
2216 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2217 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2218 offsetof(trap_state, tnpc));
2219 tcg_temp_free_ptr(r_tsptr);
2221 break;
2222 case 2: // tstate
2224 TCGv_ptr r_tsptr;
2226 r_tsptr = tcg_temp_new_ptr();
2227 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2228 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2229 offsetof(trap_state, tstate));
2230 tcg_temp_free_ptr(r_tsptr);
2232 break;
2233 case 3: // tt
2235 TCGv_ptr r_tsptr;
2237 r_tsptr = tcg_temp_new_ptr();
2238 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2239 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2240 offsetof(trap_state, tt));
2241 tcg_temp_free_ptr(r_tsptr);
2242 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2244 break;
2245 case 4: // tick
2247 TCGv_ptr r_tickptr;
2249 r_tickptr = tcg_temp_new_ptr();
2250 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2251 offsetof(CPUState, tick));
2252 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2253 gen_movl_TN_reg(rd, cpu_tmp0);
2254 tcg_temp_free_ptr(r_tickptr);
2256 break;
2257 case 5: // tba
2258 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2259 break;
2260 case 6: // pstate
2261 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2262 offsetof(CPUSPARCState, pstate));
2263 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2264 break;
2265 case 7: // tl
2266 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2267 offsetof(CPUSPARCState, tl));
2268 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2269 break;
2270 case 8: // pil
2271 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2272 offsetof(CPUSPARCState, psrpil));
2273 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2274 break;
2275 case 9: // cwp
2276 gen_helper_rdcwp(cpu_tmp0);
2277 break;
2278 case 10: // cansave
2279 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2280 offsetof(CPUSPARCState, cansave));
2281 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2282 break;
2283 case 11: // canrestore
2284 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2285 offsetof(CPUSPARCState, canrestore));
2286 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2287 break;
2288 case 12: // cleanwin
2289 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2290 offsetof(CPUSPARCState, cleanwin));
2291 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2292 break;
2293 case 13: // otherwin
2294 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2295 offsetof(CPUSPARCState, otherwin));
2296 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2297 break;
2298 case 14: // wstate
2299 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2300 offsetof(CPUSPARCState, wstate));
2301 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2302 break;
2303 case 16: // UA2005 gl
2304 CHECK_IU_FEATURE(dc, GL);
2305 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2306 offsetof(CPUSPARCState, gl));
2307 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2308 break;
2309 case 26: // UA2005 strand status
2310 CHECK_IU_FEATURE(dc, HYPV);
2311 if (!hypervisor(dc))
2312 goto priv_insn;
2313 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2314 break;
2315 case 31: // ver
2316 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2317 break;
2318 case 15: // fq
2319 default:
2320 goto illegal_insn;
2321 }
2322 #else
2323 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2324 #endif
2325 gen_movl_TN_reg(rd, cpu_tmp0);
2326 break;
2327 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2328 #ifdef TARGET_SPARC64
2329 save_state(dc, cpu_cond);
2330 gen_helper_flushw();
2331 #else
2332 if (!supervisor(dc))
2333 goto priv_insn;
2334 gen_movl_TN_reg(rd, cpu_tbr);
2335 #endif
2336 break;
2337 #endif
2338 } else if (xop == 0x34) { /* FPU Operations */
2339 if (gen_trap_ifnofpu(dc, cpu_cond))
2340 goto jmp_insn;
2341 gen_op_clear_ieee_excp_and_FTT();
2342 rs1 = GET_FIELD(insn, 13, 17);
2343 rs2 = GET_FIELD(insn, 27, 31);
2344 xop = GET_FIELD(insn, 18, 26);
2345 save_state(dc, cpu_cond);
2346 switch (xop) {
2347 case 0x1: /* fmovs */
2348 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2349 break;
2350 case 0x5: /* fnegs */
2351 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2352 break;
2353 case 0x9: /* fabss */
2354 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2355 break;
2356 case 0x29: /* fsqrts */
2357 CHECK_FPU_FEATURE(dc, FSQRT);
2358 gen_clear_float_exceptions();
2359 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2360 gen_helper_check_ieee_exceptions();
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2362 break;
2363 case 0x2a: /* fsqrtd */
2364 CHECK_FPU_FEATURE(dc, FSQRT);
2365 gen_op_load_fpr_DT1(DFPREG(rs2));
2366 gen_clear_float_exceptions();
2367 gen_helper_fsqrtd();
2368 gen_helper_check_ieee_exceptions();
2369 gen_op_store_DT0_fpr(DFPREG(rd));
2370 break;
2371 case 0x2b: /* fsqrtq */
2372 CHECK_FPU_FEATURE(dc, FLOAT128);
2373 gen_op_load_fpr_QT1(QFPREG(rs2));
2374 gen_clear_float_exceptions();
2375 gen_helper_fsqrtq();
2376 gen_helper_check_ieee_exceptions();
2377 gen_op_store_QT0_fpr(QFPREG(rd));
2378 break;
2379 case 0x41: /* fadds */
2380 gen_clear_float_exceptions();
2381 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2382 gen_helper_check_ieee_exceptions();
2383 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2384 break;
2385 case 0x42: /* faddd */
2386 gen_op_load_fpr_DT0(DFPREG(rs1));
2387 gen_op_load_fpr_DT1(DFPREG(rs2));
2388 gen_clear_float_exceptions();
2389 gen_helper_faddd();
2390 gen_helper_check_ieee_exceptions();
2391 gen_op_store_DT0_fpr(DFPREG(rd));
2392 break;
2393 case 0x43: /* faddq */
2394 CHECK_FPU_FEATURE(dc, FLOAT128);
2395 gen_op_load_fpr_QT0(QFPREG(rs1));
2396 gen_op_load_fpr_QT1(QFPREG(rs2));
2397 gen_clear_float_exceptions();
2398 gen_helper_faddq();
2399 gen_helper_check_ieee_exceptions();
2400 gen_op_store_QT0_fpr(QFPREG(rd));
2401 break;
2402 case 0x45: /* fsubs */
2403 gen_clear_float_exceptions();
2404 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2405 gen_helper_check_ieee_exceptions();
2406 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2407 break;
2408 case 0x46: /* fsubd */
2409 gen_op_load_fpr_DT0(DFPREG(rs1));
2410 gen_op_load_fpr_DT1(DFPREG(rs2));
2411 gen_clear_float_exceptions();
2412 gen_helper_fsubd();
2413 gen_helper_check_ieee_exceptions();
2414 gen_op_store_DT0_fpr(DFPREG(rd));
2415 break;
2416 case 0x47: /* fsubq */
2417 CHECK_FPU_FEATURE(dc, FLOAT128);
2418 gen_op_load_fpr_QT0(QFPREG(rs1));
2419 gen_op_load_fpr_QT1(QFPREG(rs2));
2420 gen_clear_float_exceptions();
2421 gen_helper_fsubq();
2422 gen_helper_check_ieee_exceptions();
2423 gen_op_store_QT0_fpr(QFPREG(rd));
2424 break;
2425 case 0x49: /* fmuls */
2426 CHECK_FPU_FEATURE(dc, FMUL);
2427 gen_clear_float_exceptions();
2428 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2429 gen_helper_check_ieee_exceptions();
2430 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2431 break;
2432 case 0x4a: /* fmuld */
2433 CHECK_FPU_FEATURE(dc, FMUL);
2434 gen_op_load_fpr_DT0(DFPREG(rs1));
2435 gen_op_load_fpr_DT1(DFPREG(rs2));
2436 gen_clear_float_exceptions();
2437 gen_helper_fmuld();
2438 gen_helper_check_ieee_exceptions();
2439 gen_op_store_DT0_fpr(DFPREG(rd));
2440 break;
2441 case 0x4b: /* fmulq */
2442 CHECK_FPU_FEATURE(dc, FLOAT128);
2443 CHECK_FPU_FEATURE(dc, FMUL);
2444 gen_op_load_fpr_QT0(QFPREG(rs1));
2445 gen_op_load_fpr_QT1(QFPREG(rs2));
2446 gen_clear_float_exceptions();
2447 gen_helper_fmulq();
2448 gen_helper_check_ieee_exceptions();
2449 gen_op_store_QT0_fpr(QFPREG(rd));
2450 break;
2451 case 0x4d: /* fdivs */
2452 gen_clear_float_exceptions();
2453 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2454 gen_helper_check_ieee_exceptions();
2455 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2456 break;
2457 case 0x4e: /* fdivd */
2458 gen_op_load_fpr_DT0(DFPREG(rs1));
2459 gen_op_load_fpr_DT1(DFPREG(rs2));
2460 gen_clear_float_exceptions();
2461 gen_helper_fdivd();
2462 gen_helper_check_ieee_exceptions();
2463 gen_op_store_DT0_fpr(DFPREG(rd));
2464 break;
2465 case 0x4f: /* fdivq */
2466 CHECK_FPU_FEATURE(dc, FLOAT128);
2467 gen_op_load_fpr_QT0(QFPREG(rs1));
2468 gen_op_load_fpr_QT1(QFPREG(rs2));
2469 gen_clear_float_exceptions();
2470 gen_helper_fdivq();
2471 gen_helper_check_ieee_exceptions();
2472 gen_op_store_QT0_fpr(QFPREG(rd));
2473 break;
2474 case 0x69: /* fsmuld */
2475 CHECK_FPU_FEATURE(dc, FSMULD);
2476 gen_clear_float_exceptions();
2477 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2478 gen_helper_check_ieee_exceptions();
2479 gen_op_store_DT0_fpr(DFPREG(rd));
2480 break;
2481 case 0x6e: /* fdmulq */
2482 CHECK_FPU_FEATURE(dc, FLOAT128);
2483 gen_op_load_fpr_DT0(DFPREG(rs1));
2484 gen_op_load_fpr_DT1(DFPREG(rs2));
2485 gen_clear_float_exceptions();
2486 gen_helper_fdmulq();
2487 gen_helper_check_ieee_exceptions();
2488 gen_op_store_QT0_fpr(QFPREG(rd));
2489 break;
2490 case 0xc4: /* fitos */
2491 gen_clear_float_exceptions();
2492 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2493 gen_helper_check_ieee_exceptions();
2494 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2495 break;
2496 case 0xc6: /* fdtos */
2497 gen_op_load_fpr_DT1(DFPREG(rs2));
2498 gen_clear_float_exceptions();
2499 gen_helper_fdtos(cpu_tmp32);
2500 gen_helper_check_ieee_exceptions();
2501 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2502 break;
2503 case 0xc7: /* fqtos */
2504 CHECK_FPU_FEATURE(dc, FLOAT128);
2505 gen_op_load_fpr_QT1(QFPREG(rs2));
2506 gen_clear_float_exceptions();
2507 gen_helper_fqtos(cpu_tmp32);
2508 gen_helper_check_ieee_exceptions();
2509 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2510 break;
2511 case 0xc8: /* fitod */
2512 gen_helper_fitod(cpu_fpr[rs2]);
2513 gen_op_store_DT0_fpr(DFPREG(rd));
2514 break;
2515 case 0xc9: /* fstod */
2516 gen_helper_fstod(cpu_fpr[rs2]);
2517 gen_op_store_DT0_fpr(DFPREG(rd));
2518 break;
2519 case 0xcb: /* fqtod */
2520 CHECK_FPU_FEATURE(dc, FLOAT128);
2521 gen_op_load_fpr_QT1(QFPREG(rs2));
2522 gen_clear_float_exceptions();
2523 gen_helper_fqtod();
2524 gen_helper_check_ieee_exceptions();
2525 gen_op_store_DT0_fpr(DFPREG(rd));
2526 break;
2527 case 0xcc: /* fitoq */
2528 CHECK_FPU_FEATURE(dc, FLOAT128);
2529 gen_helper_fitoq(cpu_fpr[rs2]);
2530 gen_op_store_QT0_fpr(QFPREG(rd));
2531 break;
2532 case 0xcd: /* fstoq */
2533 CHECK_FPU_FEATURE(dc, FLOAT128);
2534 gen_helper_fstoq(cpu_fpr[rs2]);
2535 gen_op_store_QT0_fpr(QFPREG(rd));
2536 break;
2537 case 0xce: /* fdtoq */
2538 CHECK_FPU_FEATURE(dc, FLOAT128);
2539 gen_op_load_fpr_DT1(DFPREG(rs2));
2540 gen_helper_fdtoq();
2541 gen_op_store_QT0_fpr(QFPREG(rd));
2542 break;
2543 case 0xd1: /* fstoi */
2544 gen_clear_float_exceptions();
2545 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2546 gen_helper_check_ieee_exceptions();
2547 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2548 break;
2549 case 0xd2: /* fdtoi */
2550 gen_op_load_fpr_DT1(DFPREG(rs2));
2551 gen_clear_float_exceptions();
2552 gen_helper_fdtoi(cpu_tmp32);
2553 gen_helper_check_ieee_exceptions();
2554 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2555 break;
2556 case 0xd3: /* fqtoi */
2557 CHECK_FPU_FEATURE(dc, FLOAT128);
2558 gen_op_load_fpr_QT1(QFPREG(rs2));
2559 gen_clear_float_exceptions();
2560 gen_helper_fqtoi(cpu_tmp32);
2561 gen_helper_check_ieee_exceptions();
2562 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2563 break;
2564 #ifdef TARGET_SPARC64
2565 case 0x2: /* V9 fmovd */
2566 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2567 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2568 cpu_fpr[DFPREG(rs2) + 1]);
2569 break;
2570 case 0x3: /* V9 fmovq */
2571 CHECK_FPU_FEATURE(dc, FLOAT128);
2572 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2573 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2574 cpu_fpr[QFPREG(rs2) + 1]);
2575 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2576 cpu_fpr[QFPREG(rs2) + 2]);
2577 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2578 cpu_fpr[QFPREG(rs2) + 3]);
2579 break;
2580 case 0x6: /* V9 fnegd */
2581 gen_op_load_fpr_DT1(DFPREG(rs2));
2582 gen_helper_fnegd();
2583 gen_op_store_DT0_fpr(DFPREG(rd));
2584 break;
2585 case 0x7: /* V9 fnegq */
2586 CHECK_FPU_FEATURE(dc, FLOAT128);
2587 gen_op_load_fpr_QT1(QFPREG(rs2));
2588 gen_helper_fnegq();
2589 gen_op_store_QT0_fpr(QFPREG(rd));
2590 break;
2591 case 0xa: /* V9 fabsd */
2592 gen_op_load_fpr_DT1(DFPREG(rs2));
2593 gen_helper_fabsd();
2594 gen_op_store_DT0_fpr(DFPREG(rd));
2595 break;
2596 case 0xb: /* V9 fabsq */
2597 CHECK_FPU_FEATURE(dc, FLOAT128);
2598 gen_op_load_fpr_QT1(QFPREG(rs2));
2599 gen_helper_fabsq();
2600 gen_op_store_QT0_fpr(QFPREG(rd));
2601 break;
2602 case 0x81: /* V9 fstox */
2603 gen_clear_float_exceptions();
2604 gen_helper_fstox(cpu_fpr[rs2]);
2605 gen_helper_check_ieee_exceptions();
2606 gen_op_store_DT0_fpr(DFPREG(rd));
2607 break;
2608 case 0x82: /* V9 fdtox */
2609 gen_op_load_fpr_DT1(DFPREG(rs2));
2610 gen_clear_float_exceptions();
2611 gen_helper_fdtox();
2612 gen_helper_check_ieee_exceptions();
2613 gen_op_store_DT0_fpr(DFPREG(rd));
2614 break;
2615 case 0x83: /* V9 fqtox */
2616 CHECK_FPU_FEATURE(dc, FLOAT128);
2617 gen_op_load_fpr_QT1(QFPREG(rs2));
2618 gen_clear_float_exceptions();
2619 gen_helper_fqtox();
2620 gen_helper_check_ieee_exceptions();
2621 gen_op_store_DT0_fpr(DFPREG(rd));
2622 break;
2623 case 0x84: /* V9 fxtos */
2624 gen_op_load_fpr_DT1(DFPREG(rs2));
2625 gen_clear_float_exceptions();
2626 gen_helper_fxtos(cpu_tmp32);
2627 gen_helper_check_ieee_exceptions();
2628 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2629 break;
2630 case 0x88: /* V9 fxtod */
2631 gen_op_load_fpr_DT1(DFPREG(rs2));
2632 gen_clear_float_exceptions();
2633 gen_helper_fxtod();
2634 gen_helper_check_ieee_exceptions();
2635 gen_op_store_DT0_fpr(DFPREG(rd));
2636 break;
2637 case 0x8c: /* V9 fxtoq */
2638 CHECK_FPU_FEATURE(dc, FLOAT128);
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 gen_clear_float_exceptions();
2641 gen_helper_fxtoq();
2642 gen_helper_check_ieee_exceptions();
2643 gen_op_store_QT0_fpr(QFPREG(rd));
2644 break;
2645 #endif
2646 default:
2647 goto illegal_insn;
2648 }
2649 } else if (xop == 0x35) { /* FPU Operations */
2650 #ifdef TARGET_SPARC64
2651 int cond;
2652 #endif
2653 if (gen_trap_ifnofpu(dc, cpu_cond))
2654 goto jmp_insn;
2655 gen_op_clear_ieee_excp_and_FTT();
2656 rs1 = GET_FIELD(insn, 13, 17);
2657 rs2 = GET_FIELD(insn, 27, 31);
2658 xop = GET_FIELD(insn, 18, 26);
2659 save_state(dc, cpu_cond);
2660 #ifdef TARGET_SPARC64
2661 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2662 int l1;
2664 l1 = gen_new_label();
2665 cond = GET_FIELD_SP(insn, 14, 17);
2666 cpu_src1 = get_src1(insn, cpu_src1);
2667 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2668 0, l1);
2669 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2670 gen_set_label(l1);
2671 break;
2672 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2673 int l1;
2675 l1 = gen_new_label();
2676 cond = GET_FIELD_SP(insn, 14, 17);
2677 cpu_src1 = get_src1(insn, cpu_src1);
2678 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2679 0, l1);
2680 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2681 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2682 gen_set_label(l1);
2683 break;
2684 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2685 int l1;
2687 CHECK_FPU_FEATURE(dc, FLOAT128);
2688 l1 = gen_new_label();
2689 cond = GET_FIELD_SP(insn, 14, 17);
2690 cpu_src1 = get_src1(insn, cpu_src1);
2691 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2692 0, l1);
2693 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2694 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2695 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2696 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2697 gen_set_label(l1);
2698 break;
2700 #endif
2701 switch (xop) {
2702 #ifdef TARGET_SPARC64
2703 #define FMOVSCC(fcc) \
2704 { \
2705 TCGv r_cond; \
2706 int l1; \
2708 l1 = gen_new_label(); \
2709 r_cond = tcg_temp_new(); \
2710 cond = GET_FIELD_SP(insn, 14, 17); \
2711 gen_fcond(r_cond, fcc, cond); \
2712 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2713 0, l1); \
2714 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2715 gen_set_label(l1); \
2716 tcg_temp_free(r_cond); \
2717 }
2718 #define FMOVDCC(fcc) \
2719 { \
2720 TCGv r_cond; \
2721 int l1; \
2723 l1 = gen_new_label(); \
2724 r_cond = tcg_temp_new(); \
2725 cond = GET_FIELD_SP(insn, 14, 17); \
2726 gen_fcond(r_cond, fcc, cond); \
2727 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2728 0, l1); \
2729 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2730 cpu_fpr[DFPREG(rs2)]); \
2731 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2732 cpu_fpr[DFPREG(rs2) + 1]); \
2733 gen_set_label(l1); \
2734 tcg_temp_free(r_cond); \
2735 }
2736 #define FMOVQCC(fcc) \
2737 { \
2738 TCGv r_cond; \
2739 int l1; \
2741 l1 = gen_new_label(); \
2742 r_cond = tcg_temp_new(); \
2743 cond = GET_FIELD_SP(insn, 14, 17); \
2744 gen_fcond(r_cond, fcc, cond); \
2745 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2746 0, l1); \
2747 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2748 cpu_fpr[QFPREG(rs2)]); \
2749 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2750 cpu_fpr[QFPREG(rs2) + 1]); \
2751 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2752 cpu_fpr[QFPREG(rs2) + 2]); \
2753 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2754 cpu_fpr[QFPREG(rs2) + 3]); \
2755 gen_set_label(l1); \
2756 tcg_temp_free(r_cond); \
2757 }
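/* The FMOV*CC macros implement the V9 conditional FP moves without a
   conditional-move op: the condition is evaluated into a temporary and
   a brcond skips the register copy when it is false. The double and
   quad variants copy two and four 32-bit register halves. */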
2758 case 0x001: /* V9 fmovscc %fcc0 */
2759 FMOVSCC(0);
2760 break;
2761 case 0x002: /* V9 fmovdcc %fcc0 */
2762 FMOVDCC(0);
2763 break;
2764 case 0x003: /* V9 fmovqcc %fcc0 */
2765 CHECK_FPU_FEATURE(dc, FLOAT128);
2766 FMOVQCC(0);
2767 break;
2768 case 0x041: /* V9 fmovscc %fcc1 */
2769 FMOVSCC(1);
2770 break;
2771 case 0x042: /* V9 fmovdcc %fcc1 */
2772 FMOVDCC(1);
2773 break;
2774 case 0x043: /* V9 fmovqcc %fcc1 */
2775 CHECK_FPU_FEATURE(dc, FLOAT128);
2776 FMOVQCC(1);
2777 break;
2778 case 0x081: /* V9 fmovscc %fcc2 */
2779 FMOVSCC(2);
2780 break;
2781 case 0x082: /* V9 fmovdcc %fcc2 */
2782 FMOVDCC(2);
2783 break;
2784 case 0x083: /* V9 fmovqcc %fcc2 */
2785 CHECK_FPU_FEATURE(dc, FLOAT128);
2786 FMOVQCC(2);
2787 break;
2788 case 0x0c1: /* V9 fmovscc %fcc3 */
2789 FMOVSCC(3);
2790 break;
2791 case 0x0c2: /* V9 fmovdcc %fcc3 */
2792 FMOVDCC(3);
2793 break;
2794 case 0x0c3: /* V9 fmovqcc %fcc3 */
2795 CHECK_FPU_FEATURE(dc, FLOAT128);
2796 FMOVQCC(3);
2797 break;
2798 #undef FMOVSCC
2799 #undef FMOVDCC
2800 #undef FMOVQCC
2801 #define FMOVSCC(icc) \
2802 { \
2803 TCGv r_cond; \
2804 int l1; \
2806 l1 = gen_new_label(); \
2807 r_cond = tcg_temp_new(); \
2808 cond = GET_FIELD_SP(insn, 14, 17); \
2809 gen_cond(r_cond, icc, cond, dc); \
2810 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2811 0, l1); \
2812 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2813 gen_set_label(l1); \
2814 tcg_temp_free(r_cond); \
2815 }
2816 #define FMOVDCC(icc) \
2817 { \
2818 TCGv r_cond; \
2819 int l1; \
2821 l1 = gen_new_label(); \
2822 r_cond = tcg_temp_new(); \
2823 cond = GET_FIELD_SP(insn, 14, 17); \
2824 gen_cond(r_cond, icc, cond, dc); \
2825 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2826 0, l1); \
2827 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2828 cpu_fpr[DFPREG(rs2)]); \
2829 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2830 cpu_fpr[DFPREG(rs2) + 1]); \
2831 gen_set_label(l1); \
2832 tcg_temp_free(r_cond); \
2833 }
2834 #define FMOVQCC(icc) \
2835 { \
2836 TCGv r_cond; \
2837 int l1; \
2839 l1 = gen_new_label(); \
2840 r_cond = tcg_temp_new(); \
2841 cond = GET_FIELD_SP(insn, 14, 17); \
2842 gen_cond(r_cond, icc, cond, dc); \
2843 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2844 0, l1); \
2845 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2846 cpu_fpr[QFPREG(rs2)]); \
2847 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2848 cpu_fpr[QFPREG(rs2) + 1]); \
2849 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2850 cpu_fpr[QFPREG(rs2) + 2]); \
2851 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2852 cpu_fpr[QFPREG(rs2) + 3]); \
2853 gen_set_label(l1); \
2854 tcg_temp_free(r_cond); \
2855 }
2857 case 0x101: /* V9 fmovscc %icc */
2858 FMOVSCC(0);
2859 break;
2860 case 0x102: /* V9 fmovdcc %icc */
2861 FMOVDCC(0);
break; /* without this break, fmovdcc falls through into fmovqcc */
2862 case 0x103: /* V9 fmovqcc %icc */
2863 CHECK_FPU_FEATURE(dc, FLOAT128);
2864 FMOVQCC(0);
2865 break;
2866 case 0x181: /* V9 fmovscc %xcc */
2867 FMOVSCC(1);
2868 break;
2869 case 0x182: /* V9 fmovdcc %xcc */
2870 FMOVDCC(1);
2871 break;
2872 case 0x183: /* V9 fmovqcc %xcc */
2873 CHECK_FPU_FEATURE(dc, FLOAT128);
2874 FMOVQCC(1);
2875 break;
2876 #undef FMOVSCC
2877 #undef FMOVDCC
2878 #undef FMOVQCC
2879 #endif
2880 case 0x51: /* fcmps, V9 %fcc */
2881 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2882 break;
2883 case 0x52: /* fcmpd, V9 %fcc */
2884 gen_op_load_fpr_DT0(DFPREG(rs1));
2885 gen_op_load_fpr_DT1(DFPREG(rs2));
2886 gen_op_fcmpd(rd & 3);
2887 break;
2888 case 0x53: /* fcmpq, V9 %fcc */
2889 CHECK_FPU_FEATURE(dc, FLOAT128);
2890 gen_op_load_fpr_QT0(QFPREG(rs1));
2891 gen_op_load_fpr_QT1(QFPREG(rs2));
2892 gen_op_fcmpq(rd & 3);
2893 break;
2894 case 0x55: /* fcmpes, V9 %fcc */
2895 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2896 break;
2897 case 0x56: /* fcmped, V9 %fcc */
2898 gen_op_load_fpr_DT0(DFPREG(rs1));
2899 gen_op_load_fpr_DT1(DFPREG(rs2));
2900 gen_op_fcmped(rd & 3);
2901 break;
2902 case 0x57: /* fcmpeq, V9 %fcc */
2903 CHECK_FPU_FEATURE(dc, FLOAT128);
2904 gen_op_load_fpr_QT0(QFPREG(rs1));
2905 gen_op_load_fpr_QT1(QFPREG(rs2));
2906 gen_op_fcmpeq(rd & 3);
2907 break;
2908 default:
2909 goto illegal_insn;
2910 }
2911 } else if (xop == 0x2) {
2912 // clr/mov shortcut
2914 rs1 = GET_FIELD(insn, 13, 17);
2915 if (rs1 == 0) {
2916 // or %g0, x, y -> mov T0, x; mov y, T0
2917 if (IS_IMM) { /* immediate */
2918 TCGv r_const;
2920 simm = GET_FIELDs(insn, 19, 31);
2921 r_const = tcg_const_tl(simm);
2922 gen_movl_TN_reg(rd, r_const);
2923 tcg_temp_free(r_const);
2924 } else { /* register */
2925 rs2 = GET_FIELD(insn, 27, 31);
2926 gen_movl_reg_TN(rs2, cpu_dst);
2927 gen_movl_TN_reg(rd, cpu_dst);
2929 } else {
2930 cpu_src1 = get_src1(insn, cpu_src1);
2931 if (IS_IMM) { /* immediate */
2932 simm = GET_FIELDs(insn, 19, 31);
2933 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2934 gen_movl_TN_reg(rd, cpu_dst);
2935 } else { /* register */
2936 // or x, %g0, y -> mov T1, x; mov y, T1
2937 rs2 = GET_FIELD(insn, 27, 31);
2938 if (rs2 != 0) {
2939 gen_movl_reg_TN(rs2, cpu_src2);
2940 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2941 gen_movl_TN_reg(rd, cpu_dst);
2942 } else
2943 gen_movl_TN_reg(rd, cpu_src1);
2944 }
2945 }
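/* "or" with %g0 as a source is the canonical SPARC encoding of mov
   (and of clr when the other source is zero too); since xop 0x2 is
   the non-cc form of or, the shortcut can skip flag computation. */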
2946 #ifdef TARGET_SPARC64
2947 } else if (xop == 0x25) { /* sll, V9 sllx */
2948 cpu_src1 = get_src1(insn, cpu_src1);
2949 if (IS_IMM) { /* immediate */
2950 simm = GET_FIELDs(insn, 20, 31);
2951 if (insn & (1 << 12)) {
2952 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2953 } else {
2954 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2956 } else { /* register */
2957 rs2 = GET_FIELD(insn, 27, 31);
2958 gen_movl_reg_TN(rs2, cpu_src2);
2959 if (insn & (1 << 12)) {
2960 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2961 } else {
2962 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2964 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2966 gen_movl_TN_reg(rd, cpu_dst);
2967 } else if (xop == 0x26) { /* srl, V9 srlx */
2968 cpu_src1 = get_src1(insn, cpu_src1);
2969 if (IS_IMM) { /* immediate */
2970 simm = GET_FIELDs(insn, 20, 31);
2971 if (insn & (1 << 12)) {
2972 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2973 } else {
2974 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2975 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2977 } else { /* register */
2978 rs2 = GET_FIELD(insn, 27, 31);
2979 gen_movl_reg_TN(rs2, cpu_src2);
2980 if (insn & (1 << 12)) {
2981 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2982 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2983 } else {
2984 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2985 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2986 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2989 gen_movl_TN_reg(rd, cpu_dst);
2990 } else if (xop == 0x27) { /* sra, V9 srax */
2991 cpu_src1 = get_src1(insn, cpu_src1);
2992 if (IS_IMM) { /* immediate */
2993 simm = GET_FIELDs(insn, 20, 31);
2994 if (insn & (1 << 12)) {
2995 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2996 } else {
2997 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2998 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2999 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3001 } else { /* register */
3002 rs2 = GET_FIELD(insn, 27, 31);
3003 gen_movl_reg_TN(rs2, cpu_src2);
3004 if (insn & (1 << 12)) {
3005 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3006 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3007 } else {
3008 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3009 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3010 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3011 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3014 gen_movl_TN_reg(rd, cpu_dst);
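/* For these V9 shifts, bit 12 of the instruction selects the 64-bit
   form (sllx/srlx/srax, count masked with 0x3f) versus the 32-bit
   form (count masked with 0x1f); the 32-bit srl and sra additionally
   zero- or sign-extend the low word of the source first. */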
3015 #endif
3016 } else if (xop < 0x36) {
3017 if (xop < 0x20) {
3018 cpu_src1 = get_src1(insn, cpu_src1);
3019 cpu_src2 = get_src2(insn, cpu_src2);
3020 switch (xop & ~0x10) {
3021 case 0x0: /* add */
3022 if (IS_IMM) {
3023 simm = GET_FIELDs(insn, 19, 31);
3024 if (xop & 0x10) {
3025 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3026 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3027 dc->cc_op = CC_OP_ADD;
3028 } else {
3029 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3031 } else {
3032 if (xop & 0x10) {
3033 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3034 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3035 dc->cc_op = CC_OP_ADD;
3036 } else {
3037 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3040 break;
3041 case 0x1: /* and */
3042 if (IS_IMM) {
3043 simm = GET_FIELDs(insn, 19, 31);
3044 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3045 } else {
3046 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3048 if (xop & 0x10) {
3049 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3050 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3051 dc->cc_op = CC_OP_LOGIC;
3053 break;
3054 case 0x2: /* or */
3055 if (IS_IMM) {
3056 simm = GET_FIELDs(insn, 19, 31);
3057 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3058 } else {
3059 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3061 if (xop & 0x10) {
3062 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3063 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3064 dc->cc_op = CC_OP_LOGIC;
3066 break;
3067 case 0x3: /* xor */
3068 if (IS_IMM) {
3069 simm = GET_FIELDs(insn, 19, 31);
3070 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3071 } else {
3072 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3074 if (xop & 0x10) {
3075 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3076 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3077 dc->cc_op = CC_OP_LOGIC;
3079 break;
3080 case 0x4: /* sub */
3081 if (IS_IMM) {
3082 simm = GET_FIELDs(insn, 19, 31);
3083 if (xop & 0x10) {
3084 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3085 } else {
3086 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3088 } else {
3089 if (xop & 0x10) {
3090 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3091 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3092 dc->cc_op = CC_OP_SUB;
3093 } else {
3094 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3097 break;
3098 case 0x5: /* andn */
3099 if (IS_IMM) {
3100 simm = GET_FIELDs(insn, 19, 31);
3101 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3102 } else {
3103 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3105 if (xop & 0x10) {
3106 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3107 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3108 dc->cc_op = CC_OP_LOGIC;
3110 break;
3111 case 0x6: /* orn */
3112 if (IS_IMM) {
3113 simm = GET_FIELDs(insn, 19, 31);
3114 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3115 } else {
3116 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3118 if (xop & 0x10) {
3119 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3120 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3121 dc->cc_op = CC_OP_LOGIC;
3123 break;
3124 case 0x7: /* xorn */
3125 if (IS_IMM) {
3126 simm = GET_FIELDs(insn, 19, 31);
3127 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3128 } else {
3129 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3130 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3132 if (xop & 0x10) {
3133 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3134 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3135 dc->cc_op = CC_OP_LOGIC;
3137 break;
3138 case 0x8: /* addx, V9 addc */
3139 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3140 (xop & 0x10));
3141 break;
3142 #ifdef TARGET_SPARC64
3143 case 0x9: /* V9 mulx */
3144 if (IS_IMM) {
3145 simm = GET_FIELDs(insn, 19, 31);
3146 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3147 } else {
3148 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3150 break;
3151 #endif
3152 case 0xa: /* umul */
3153 CHECK_IU_FEATURE(dc, MUL);
3154 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3155 if (xop & 0x10) {
3156 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3157 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3158 dc->cc_op = CC_OP_LOGIC;
3160 break;
3161 case 0xb: /* smul */
3162 CHECK_IU_FEATURE(dc, MUL);
3163 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3164 if (xop & 0x10) {
3165 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3166 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3167 dc->cc_op = CC_OP_LOGIC;
3169 break;
3170 case 0xc: /* subx, V9 subc */
3171 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3172 (xop & 0x10));
3173 break;
3174 #ifdef TARGET_SPARC64
3175 case 0xd: /* V9 udivx */
3176 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3177 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3178 gen_trap_ifdivzero_tl(cpu_cc_src2);
3179 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3180 break;
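/* The explicit zero-divisor trap must be emitted before the TCG
   divide: tcg_gen_divu_i64 itself has no defined behaviour for a
   zero divisor. */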
3181 #endif
3182 case 0xe: /* udiv */
3183 CHECK_IU_FEATURE(dc, DIV);
3184 if (xop & 0x10) {
3185 gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3186 dc->cc_op = CC_OP_DIV;
3187 } else {
3188 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3190 break;
3191 case 0xf: /* sdiv */
3192 CHECK_IU_FEATURE(dc, DIV);
3193 if (xop & 0x10) {
3194 gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3195 dc->cc_op = CC_OP_DIV;
3196 } else {
3197 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3199 break;
3200 default:
3201 goto illegal_insn;
3202 }
3203 gen_movl_TN_reg(rd, cpu_dst);
3204 } else {
3205 cpu_src1 = get_src1(insn, cpu_src1);
3206 cpu_src2 = get_src2(insn, cpu_src2);
3207 switch (xop) {
3208 case 0x20: /* taddcc */
3209 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3210 gen_movl_TN_reg(rd, cpu_dst);
3211 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3212 dc->cc_op = CC_OP_TADD;
3213 break;
3214 case 0x21: /* tsubcc */
3215 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3216 gen_movl_TN_reg(rd, cpu_dst);
3217 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3218 dc->cc_op = CC_OP_TSUB;
3219 break;
3220 case 0x22: /* taddcctv */
3221 save_state(dc, cpu_cond);
3222 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3223 gen_movl_TN_reg(rd, cpu_dst);
3224 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3225 dc->cc_op = CC_OP_TADDTV;
3226 break;
3227 case 0x23: /* tsubcctv */
3228 save_state(dc, cpu_cond);
3229 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3230 gen_movl_TN_reg(rd, cpu_dst);
3231 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3232 dc->cc_op = CC_OP_TSUBTV;
3233 break;
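/* The trap-on-overflow tagged forms (taddcctv/tsubcctv) call
   save_state first because the helper may raise a tag-overflow
   exception, so pc/npc must be current at that point. */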
3234 case 0x24: /* mulscc */
3235 gen_helper_compute_psr();
3236 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3237 gen_movl_TN_reg(rd, cpu_dst);
3238 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3239 dc->cc_op = CC_OP_ADD;
3240 break;
3241 #ifndef TARGET_SPARC64
3242 case 0x25: /* sll */
3243 if (IS_IMM) { /* immediate */
3244 simm = GET_FIELDs(insn, 20, 31);
3245 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3246 } else { /* register */
3247 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3248 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3250 gen_movl_TN_reg(rd, cpu_dst);
3251 break;
3252 case 0x26: /* srl */
3253 if (IS_IMM) { /* immediate */
3254 simm = GET_FIELDs(insn, 20, 31);
3255 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3256 } else { /* register */
3257 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3258 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3260 gen_movl_TN_reg(rd, cpu_dst);
3261 break;
3262 case 0x27: /* sra */
3263 if (IS_IMM) { /* immediate */
3264 simm = GET_FIELDs(insn, 20, 31);
3265 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3266 } else { /* register */
3267 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3268 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3270 gen_movl_TN_reg(rd, cpu_dst);
3271 break;
3272 #endif
3273 case 0x30:
3274 {
3275 switch(rd) {
3276 case 0: /* wry */
3277 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3278 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3279 break;
3280 #ifndef TARGET_SPARC64
3281 case 0x01 ... 0x0f: /* undefined in the
3282 SPARCv8 manual, nop
3283 on the microSPARC
3284 II */
3285 case 0x10 ... 0x1f: /* implementation-dependent
3286 in the SPARCv8
3287 manual, nop on the
3288 microSPARC II */
3289 break;
3290 #else
3291 case 0x2: /* V9 wrccr */
3292 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3293 gen_helper_wrccr(cpu_dst);
3294 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3295 dc->cc_op = CC_OP_FLAGS;
3296 break;
3297 case 0x3: /* V9 wrasi */
3298 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3299 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3300 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3301 break;
3302 case 0x6: /* V9 wrfprs */
3303 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3304 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3305 save_state(dc, cpu_cond);
3306 gen_op_next_insn();
3307 tcg_gen_exit_tb(0);
3308 dc->is_br = 1;
3309 break;
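/* Writing FPRS can enable or disable the FPU, invalidating
   assumptions made while translating later instructions, so the
   current TB is ended and translation restarts at the next insn. */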
3310 case 0xf: /* V9 sir, nop if user */
3311 #if !defined(CONFIG_USER_ONLY)
3312 if (supervisor(dc)) {
3313 ; // XXX
3315 #endif
3316 break;
3317 case 0x13: /* Graphics Status */
3318 if (gen_trap_ifnofpu(dc, cpu_cond))
3319 goto jmp_insn;
3320 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3321 break;
3322 case 0x14: /* Softint set */
3323 if (!supervisor(dc))
3324 goto illegal_insn;
3325 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3326 gen_helper_set_softint(cpu_tmp64);
3327 break;
3328 case 0x15: /* Softint clear */
3329 if (!supervisor(dc))
3330 goto illegal_insn;
3331 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3332 gen_helper_clear_softint(cpu_tmp64);
3333 break;
3334 case 0x16: /* Softint write */
3335 if (!supervisor(dc))
3336 goto illegal_insn;
3337 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3338 gen_helper_write_softint(cpu_tmp64);
3339 break;
3340 case 0x17: /* Tick compare */
3341 #if !defined(CONFIG_USER_ONLY)
3342 if (!supervisor(dc))
3343 goto illegal_insn;
3344 #endif
3346 TCGv_ptr r_tickptr;
3348 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3349 cpu_src2);
3350 r_tickptr = tcg_temp_new_ptr();
3351 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3352 offsetof(CPUState, tick));
3353 gen_helper_tick_set_limit(r_tickptr,
3354 cpu_tick_cmpr);
3355 tcg_temp_free_ptr(r_tickptr);
3357 break;
3358 case 0x18: /* System tick */
3359 #if !defined(CONFIG_USER_ONLY)
3360 if (!supervisor(dc))
3361 goto illegal_insn;
3362 #endif
3364 TCGv_ptr r_tickptr;
3366 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3367 cpu_src2);
3368 r_tickptr = tcg_temp_new_ptr();
3369 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3370 offsetof(CPUState, stick));
3371 gen_helper_tick_set_count(r_tickptr,
3372 cpu_dst);
3373 tcg_temp_free_ptr(r_tickptr);
3375 break;
3376 case 0x19: /* System tick compare */
3377 #if !defined(CONFIG_USER_ONLY)
3378 if (!supervisor(dc))
3379 goto illegal_insn;
3380 #endif
3382 TCGv_ptr r_tickptr;
3384 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3385 cpu_src2);
3386 r_tickptr = tcg_temp_new_ptr();
3387 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3388 offsetof(CPUState, stick));
3389 gen_helper_tick_set_limit(r_tickptr,
3390 cpu_stick_cmpr);
3391 tcg_temp_free_ptr(r_tickptr);
3393 break;
3395 case 0x10: /* Performance Control */
3396 case 0x11: /* Performance Instrumentation
3397 Counter */
3398 case 0x12: /* Dispatch Control */
3399 #endif
3400 default:
3401 goto illegal_insn;
3402 }
3403 }
3404 break;
3405 #if !defined(CONFIG_USER_ONLY)
3406 case 0x31: /* wrpsr, V9 saved, restored */
3408 if (!supervisor(dc))
3409 goto priv_insn;
3410 #ifdef TARGET_SPARC64
3411 switch (rd) {
3412 case 0:
3413 gen_helper_saved();
3414 break;
3415 case 1:
3416 gen_helper_restored();
3417 break;
3418 case 2: /* UA2005 allclean */
3419 case 3: /* UA2005 otherw */
3420 case 4: /* UA2005 normalw */
3421 case 5: /* UA2005 invalw */
3422 // XXX
3423 default:
3424 goto illegal_insn;
3426 #else
3427 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3428 gen_helper_wrpsr(cpu_dst);
3429 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3430 dc->cc_op = CC_OP_FLAGS;
3431 save_state(dc, cpu_cond);
3432 gen_op_next_insn();
3433 tcg_gen_exit_tb(0);
3434 dc->is_br = 1;
3435 #endif
3437 break;
3438 case 0x32: /* wrwim, V9 wrpr */
3440 if (!supervisor(dc))
3441 goto priv_insn;
3442 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3443 #ifdef TARGET_SPARC64
3444 switch (rd) {
3445 case 0: // tpc
3447 TCGv_ptr r_tsptr;
3449 r_tsptr = tcg_temp_new_ptr();
3450 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3451 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3452 offsetof(trap_state, tpc));
3453 tcg_temp_free_ptr(r_tsptr);
3455 break;
3456 case 1: // tnpc
3458 TCGv_ptr r_tsptr;
3460 r_tsptr = tcg_temp_new_ptr();
3461 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3462 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3463 offsetof(trap_state, tnpc));
3464 tcg_temp_free_ptr(r_tsptr);
3466 break;
3467 case 2: // tstate
3469 TCGv_ptr r_tsptr;
3471 r_tsptr = tcg_temp_new_ptr();
3472 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3473 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3474 offsetof(trap_state,
3475 tstate));
3476 tcg_temp_free_ptr(r_tsptr);
3478 break;
3479 case 3: // tt
3481 TCGv_ptr r_tsptr;
3483 r_tsptr = tcg_temp_new_ptr();
3484 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3485 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3486 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3487 offsetof(trap_state, tt));
3488 tcg_temp_free_ptr(r_tsptr);
3490 break;
3491 case 4: // tick
3493 TCGv_ptr r_tickptr;
3495 r_tickptr = tcg_temp_new_ptr();
3496 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3497 offsetof(CPUState, tick));
3498 gen_helper_tick_set_count(r_tickptr,
3499 cpu_tmp0);
3500 tcg_temp_free_ptr(r_tickptr);
3502 break;
3503 case 5: // tba
3504 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3505 break;
3506 case 6: // pstate
3508 TCGv r_tmp = tcg_temp_local_new();
3510 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3511 save_state(dc, cpu_cond);
3512 gen_helper_wrpstate(r_tmp);
3513 tcg_temp_free(r_tmp);
3514 dc->npc = DYNAMIC_PC;
3516 break;
3517 case 7: // tl
3519 TCGv r_tmp = tcg_temp_local_new();
3521 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3522 save_state(dc, cpu_cond);
3523 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3524 tcg_temp_free(r_tmp);
3525 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3526 offsetof(CPUSPARCState, tl));
3527 dc->npc = DYNAMIC_PC;
3529 break;
3530 case 8: // pil
3531 gen_helper_wrpil(cpu_tmp0);
3532 break;
3533 case 9: // cwp
3534 gen_helper_wrcwp(cpu_tmp0);
3535 break;
3536 case 10: // cansave
3537 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3538 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3539 offsetof(CPUSPARCState,
3540 cansave));
3541 break;
3542 case 11: // canrestore
3543 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3544 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3545 offsetof(CPUSPARCState,
3546 canrestore));
3547 break;
3548 case 12: // cleanwin
3549 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3550 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3551 offsetof(CPUSPARCState,
3552 cleanwin));
3553 break;
3554 case 13: // otherwin
3555 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3556 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3557 offsetof(CPUSPARCState,
3558 otherwin));
3559 break;
3560 case 14: // wstate
3561 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3562 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3563 offsetof(CPUSPARCState,
3564 wstate));
3565 break;
3566 case 16: // UA2005 gl
3567 CHECK_IU_FEATURE(dc, GL);
3568 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3569 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3570 offsetof(CPUSPARCState, gl));
3571 break;
3572 case 26: // UA2005 strand status
3573 CHECK_IU_FEATURE(dc, HYPV);
3574 if (!hypervisor(dc))
3575 goto priv_insn;
3576 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3577 break;
3578 default:
3579 goto illegal_insn;
3580 }
3581 #else
3582 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3583 if (dc->def->nwindows != 32)
3584 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3585 (1 << dc->def->nwindows) - 1);
3586 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3587 #endif
3589 break;
3590 case 0x33: /* wrtbr, UA2005 wrhpr */
3592 #ifndef TARGET_SPARC64
3593 if (!supervisor(dc))
3594 goto priv_insn;
3595 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3596 #else
3597 CHECK_IU_FEATURE(dc, HYPV);
3598 if (!hypervisor(dc))
3599 goto priv_insn;
3600 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3601 switch (rd) {
3602 case 0: // hpstate
3603 // XXX gen_op_wrhpstate();
3604 save_state(dc, cpu_cond);
3605 gen_op_next_insn();
3606 tcg_gen_exit_tb(0);
3607 dc->is_br = 1;
3608 break;
3609 case 1: // htstate
3610 // XXX gen_op_wrhtstate();
3611 break;
3612 case 3: // hintp
3613 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3614 break;
3615 case 5: // htba
3616 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3617 break;
3618 case 31: // hstick_cmpr
3620 TCGv_ptr r_tickptr;
3622 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3623 r_tickptr = tcg_temp_new_ptr();
3624 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3625 offsetof(CPUState, hstick));
3626 gen_helper_tick_set_limit(r_tickptr,
3627 cpu_hstick_cmpr);
3628 tcg_temp_free_ptr(r_tickptr);
3630 break;
3631 case 6: // hver readonly
3632 default:
3633 goto illegal_insn;
3634 }
3635 #endif
3637 break;
3638 #endif
3639 #ifdef TARGET_SPARC64
3640 case 0x2c: /* V9 movcc */
3642 int cc = GET_FIELD_SP(insn, 11, 12);
3643 int cond = GET_FIELD_SP(insn, 14, 17);
3644 TCGv r_cond;
3645 int l1;
3647 r_cond = tcg_temp_new();
3648 if (insn & (1 << 18)) {
3649 if (cc == 0)
3650 gen_cond(r_cond, 0, cond, dc);
3651 else if (cc == 2)
3652 gen_cond(r_cond, 1, cond, dc);
3653 else
3654 goto illegal_insn;
3655 } else {
3656 gen_fcond(r_cond, cc, cond);
3659 l1 = gen_new_label();
3661 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3662 if (IS_IMM) { /* immediate */
3663 TCGv r_const;
3665 simm = GET_FIELD_SPs(insn, 0, 10);
3666 r_const = tcg_const_tl(simm);
3667 gen_movl_TN_reg(rd, r_const);
3668 tcg_temp_free(r_const);
3669 } else {
3670 rs2 = GET_FIELD_SP(insn, 0, 4);
3671 gen_movl_reg_TN(rs2, cpu_tmp0);
3672 gen_movl_TN_reg(rd, cpu_tmp0);
3674 gen_set_label(l1);
3675 tcg_temp_free(r_cond);
3676 break;
3677 }
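/* Like the FMOV*CC cases above, movcc is implemented as a branch
   around the move rather than with a conditional-move op. */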
3678 case 0x2d: /* V9 sdivx */
3679 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3680 gen_movl_TN_reg(rd, cpu_dst);
3681 break;
3682 case 0x2e: /* V9 popc */
3683 {
3684 cpu_src2 = get_src2(insn, cpu_src2);
3685 gen_helper_popc(cpu_dst, cpu_src2);
3686 gen_movl_TN_reg(rd, cpu_dst);
3687 }
break; /* without this break, popc falls through into movr */
3688 case 0x2f: /* V9 movr */
3690 int cond = GET_FIELD_SP(insn, 10, 12);
3691 int l1;
3693 cpu_src1 = get_src1(insn, cpu_src1);
3695 l1 = gen_new_label();
3697 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3698 cpu_src1, 0, l1);
3699 if (IS_IMM) { /* immediate */
3700 TCGv r_const;
3702 simm = GET_FIELD_SPs(insn, 0, 9);
3703 r_const = tcg_const_tl(simm);
3704 gen_movl_TN_reg(rd, r_const);
3705 tcg_temp_free(r_const);
3706 } else {
3707 rs2 = GET_FIELD_SP(insn, 0, 4);
3708 gen_movl_reg_TN(rs2, cpu_tmp0);
3709 gen_movl_TN_reg(rd, cpu_tmp0);
3711 gen_set_label(l1);
3712 break;
3714 #endif
3715 default:
3716 goto illegal_insn;
3717 }
3719 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3720 #ifdef TARGET_SPARC64
3721 int opf = GET_FIELD_SP(insn, 5, 13);
3722 rs1 = GET_FIELD(insn, 13, 17);
3723 rs2 = GET_FIELD(insn, 27, 31);
3724 if (gen_trap_ifnofpu(dc, cpu_cond))
3725 goto jmp_insn;
3727 switch (opf) {
3728 case 0x000: /* VIS I edge8cc */
3729 case 0x001: /* VIS II edge8n */
3730 case 0x002: /* VIS I edge8lcc */
3731 case 0x003: /* VIS II edge8ln */
3732 case 0x004: /* VIS I edge16cc */
3733 case 0x005: /* VIS II edge16n */
3734 case 0x006: /* VIS I edge16lcc */
3735 case 0x007: /* VIS II edge16ln */
3736 case 0x008: /* VIS I edge32cc */
3737 case 0x009: /* VIS II edge32n */
3738 case 0x00a: /* VIS I edge32lcc */
3739 case 0x00b: /* VIS II edge32ln */
3740 // XXX
3741 goto illegal_insn;
3742 case 0x010: /* VIS I array8 */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 cpu_src1 = get_src1(insn, cpu_src1);
3745 gen_movl_reg_TN(rs2, cpu_src2);
3746 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3747 gen_movl_TN_reg(rd, cpu_dst);
3748 break;
3749 case 0x012: /* VIS I array16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 cpu_src1 = get_src1(insn, cpu_src1);
3752 gen_movl_reg_TN(rs2, cpu_src2);
3753 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3754 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3755 gen_movl_TN_reg(rd, cpu_dst);
3756 break;
3757 case 0x014: /* VIS I array32 */
3758 CHECK_FPU_FEATURE(dc, VIS1);
3759 cpu_src1 = get_src1(insn, cpu_src1);
3760 gen_movl_reg_TN(rs2, cpu_src2);
3761 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3762 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3763 gen_movl_TN_reg(rd, cpu_dst);
3764 break;
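/* array16 and array32 reuse the array8 helper and scale the
   resulting index by the element size with a left shift of 1 or 2. */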
3765 case 0x018: /* VIS I alignaddr */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 cpu_src1 = get_src1(insn, cpu_src1);
3768 gen_movl_reg_TN(rs2, cpu_src2);
3769 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3770 gen_movl_TN_reg(rd, cpu_dst);
3771 break;
3772 case 0x019: /* VIS II bmask */
3773 case 0x01a: /* VIS I alignaddrl */
3774 // XXX
3775 goto illegal_insn;
3776 case 0x020: /* VIS I fcmple16 */
3777 CHECK_FPU_FEATURE(dc, VIS1);
3778 gen_op_load_fpr_DT0(DFPREG(rs1));
3779 gen_op_load_fpr_DT1(DFPREG(rs2));
3780 gen_helper_fcmple16();
3781 gen_op_store_DT0_fpr(DFPREG(rd));
3782 break;
3783 case 0x022: /* VIS I fcmpne16 */
3784 CHECK_FPU_FEATURE(dc, VIS1);
3785 gen_op_load_fpr_DT0(DFPREG(rs1));
3786 gen_op_load_fpr_DT1(DFPREG(rs2));
3787 gen_helper_fcmpne16();
3788 gen_op_store_DT0_fpr(DFPREG(rd));
3789 break;
3790 case 0x024: /* VIS I fcmple32 */
3791 CHECK_FPU_FEATURE(dc, VIS1);
3792 gen_op_load_fpr_DT0(DFPREG(rs1));
3793 gen_op_load_fpr_DT1(DFPREG(rs2));
3794 gen_helper_fcmple32();
3795 gen_op_store_DT0_fpr(DFPREG(rd));
3796 break;
3797 case 0x026: /* VIS I fcmpne32 */
3798 CHECK_FPU_FEATURE(dc, VIS1);
3799 gen_op_load_fpr_DT0(DFPREG(rs1));
3800 gen_op_load_fpr_DT1(DFPREG(rs2));
3801 gen_helper_fcmpne32();
3802 gen_op_store_DT0_fpr(DFPREG(rd));
3803 break;
3804 case 0x028: /* VIS I fcmpgt16 */
3805 CHECK_FPU_FEATURE(dc, VIS1);
3806 gen_op_load_fpr_DT0(DFPREG(rs1));
3807 gen_op_load_fpr_DT1(DFPREG(rs2));
3808 gen_helper_fcmpgt16();
3809 gen_op_store_DT0_fpr(DFPREG(rd));
3810 break;
3811 case 0x02a: /* VIS I fcmpeq16 */
3812 CHECK_FPU_FEATURE(dc, VIS1);
3813 gen_op_load_fpr_DT0(DFPREG(rs1));
3814 gen_op_load_fpr_DT1(DFPREG(rs2));
3815 gen_helper_fcmpeq16();
3816 gen_op_store_DT0_fpr(DFPREG(rd));
3817 break;
3818 case 0x02c: /* VIS I fcmpgt32 */
3819 CHECK_FPU_FEATURE(dc, VIS1);
3820 gen_op_load_fpr_DT0(DFPREG(rs1));
3821 gen_op_load_fpr_DT1(DFPREG(rs2));
3822 gen_helper_fcmpgt32();
3823 gen_op_store_DT0_fpr(DFPREG(rd));
3824 break;
3825 case 0x02e: /* VIS I fcmpeq32 */
3826 CHECK_FPU_FEATURE(dc, VIS1);
3827 gen_op_load_fpr_DT0(DFPREG(rs1));
3828 gen_op_load_fpr_DT1(DFPREG(rs2));
3829 gen_helper_fcmpeq32();
3830 gen_op_store_DT0_fpr(DFPREG(rd));
3831 break;
3832 case 0x031: /* VIS I fmul8x16 */
3833 CHECK_FPU_FEATURE(dc, VIS1);
3834 gen_op_load_fpr_DT0(DFPREG(rs1));
3835 gen_op_load_fpr_DT1(DFPREG(rs2));
3836 gen_helper_fmul8x16();
3837 gen_op_store_DT0_fpr(DFPREG(rd));
3838 break;
3839 case 0x033: /* VIS I fmul8x16au */
3840 CHECK_FPU_FEATURE(dc, VIS1);
3841 gen_op_load_fpr_DT0(DFPREG(rs1));
3842 gen_op_load_fpr_DT1(DFPREG(rs2));
3843 gen_helper_fmul8x16au();
3844 gen_op_store_DT0_fpr(DFPREG(rd));
3845 break;
3846 case 0x035: /* VIS I fmul8x16al */
3847 CHECK_FPU_FEATURE(dc, VIS1);
3848 gen_op_load_fpr_DT0(DFPREG(rs1));
3849 gen_op_load_fpr_DT1(DFPREG(rs2));
3850 gen_helper_fmul8x16al();
3851 gen_op_store_DT0_fpr(DFPREG(rd));
3852 break;
3853 case 0x036: /* VIS I fmul8sux16 */
3854 CHECK_FPU_FEATURE(dc, VIS1);
3855 gen_op_load_fpr_DT0(DFPREG(rs1));
3856 gen_op_load_fpr_DT1(DFPREG(rs2));
3857 gen_helper_fmul8sux16();
3858 gen_op_store_DT0_fpr(DFPREG(rd));
3859 break;
3860 case 0x037: /* VIS I fmul8ulx16 */
3861 CHECK_FPU_FEATURE(dc, VIS1);
3862 gen_op_load_fpr_DT0(DFPREG(rs1));
3863 gen_op_load_fpr_DT1(DFPREG(rs2));
3864 gen_helper_fmul8ulx16();
3865 gen_op_store_DT0_fpr(DFPREG(rd));
3866 break;
3867 case 0x038: /* VIS I fmuld8sux16 */
3868 CHECK_FPU_FEATURE(dc, VIS1);
3869 gen_op_load_fpr_DT0(DFPREG(rs1));
3870 gen_op_load_fpr_DT1(DFPREG(rs2));
3871 gen_helper_fmuld8sux16();
3872 gen_op_store_DT0_fpr(DFPREG(rd));
3873 break;
3874 case 0x039: /* VIS I fmuld8ulx16 */
3875 CHECK_FPU_FEATURE(dc, VIS1);
3876 gen_op_load_fpr_DT0(DFPREG(rs1));
3877 gen_op_load_fpr_DT1(DFPREG(rs2));
3878 gen_helper_fmuld8ulx16();
3879 gen_op_store_DT0_fpr(DFPREG(rd));
3880 break;
3881 case 0x03a: /* VIS I fpack32 */
3882 case 0x03b: /* VIS I fpack16 */
3883 case 0x03d: /* VIS I fpackfix */
3884 case 0x03e: /* VIS I pdist */
3885 // XXX
3886 goto illegal_insn;
3887 case 0x048: /* VIS I faligndata */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 gen_op_load_fpr_DT0(DFPREG(rs1));
3890 gen_op_load_fpr_DT1(DFPREG(rs2));
3891 gen_helper_faligndata();
3892 gen_op_store_DT0_fpr(DFPREG(rd));
3893 break;
3894 case 0x04b: /* VIS I fpmerge */
3895 CHECK_FPU_FEATURE(dc, VIS1);
3896 gen_op_load_fpr_DT0(DFPREG(rs1));
3897 gen_op_load_fpr_DT1(DFPREG(rs2));
3898 gen_helper_fpmerge();
3899 gen_op_store_DT0_fpr(DFPREG(rd));
3900 break;
3901 case 0x04c: /* VIS II bshuffle */
3902 // XXX
3903 goto illegal_insn;
3904 case 0x04d: /* VIS I fexpand */
3905 CHECK_FPU_FEATURE(dc, VIS1);
3906 gen_op_load_fpr_DT0(DFPREG(rs1));
3907 gen_op_load_fpr_DT1(DFPREG(rs2));
3908 gen_helper_fexpand();
3909 gen_op_store_DT0_fpr(DFPREG(rd));
3910 break;
3911 case 0x050: /* VIS I fpadd16 */
3912 CHECK_FPU_FEATURE(dc, VIS1);
3913 gen_op_load_fpr_DT0(DFPREG(rs1));
3914 gen_op_load_fpr_DT1(DFPREG(rs2));
3915 gen_helper_fpadd16();
3916 gen_op_store_DT0_fpr(DFPREG(rd));
3917 break;
3918 case 0x051: /* VIS I fpadd16s */
3919 CHECK_FPU_FEATURE(dc, VIS1);
3920 gen_helper_fpadd16s(cpu_fpr[rd],
3921 cpu_fpr[rs1], cpu_fpr[rs2]);
3922 break;
3923 case 0x052: /* VIS I fpadd32 */
3924 CHECK_FPU_FEATURE(dc, VIS1);
3925 gen_op_load_fpr_DT0(DFPREG(rs1));
3926 gen_op_load_fpr_DT1(DFPREG(rs2));
3927 gen_helper_fpadd32();
3928 gen_op_store_DT0_fpr(DFPREG(rd));
3929 break;
3930 case 0x053: /* VIS I fpadd32s */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 gen_helper_fpadd32s(cpu_fpr[rd],
3933 cpu_fpr[rs1], cpu_fpr[rs2]);
3934 break;
3935 case 0x054: /* VIS I fpsub16 */
3936 CHECK_FPU_FEATURE(dc, VIS1);
3937 gen_op_load_fpr_DT0(DFPREG(rs1));
3938 gen_op_load_fpr_DT1(DFPREG(rs2));
3939 gen_helper_fpsub16();
3940 gen_op_store_DT0_fpr(DFPREG(rd));
3941 break;
3942 case 0x055: /* VIS I fpsub16s */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 gen_helper_fpsub16s(cpu_fpr[rd],
3945 cpu_fpr[rs1], cpu_fpr[rs2]);
3946 break;
3947 case 0x056: /* VIS I fpsub32 */
3948 CHECK_FPU_FEATURE(dc, VIS1);
3949 gen_op_load_fpr_DT0(DFPREG(rs1));
3950 gen_op_load_fpr_DT1(DFPREG(rs2));
3951 gen_helper_fpsub32();
3952 gen_op_store_DT0_fpr(DFPREG(rd));
3953 break;
3954 case 0x057: /* VIS I fpsub32s */
3955 CHECK_FPU_FEATURE(dc, VIS1);
3956 gen_helper_fpsub32s(cpu_fpr[rd],
3957 cpu_fpr[rs1], cpu_fpr[rs2]);
3958 break;
3959 case 0x060: /* VIS I fzero */
3960 CHECK_FPU_FEATURE(dc, VIS1);
3961 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3962 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3963 break;
3964 case 0x061: /* VIS I fzeros */
3965 CHECK_FPU_FEATURE(dc, VIS1);
3966 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3967 break;
3968 case 0x062: /* VIS I fnor */
3969 CHECK_FPU_FEATURE(dc, VIS1);
/* results go to the destination registers; storing into cpu_tmp32 discarded them */
3970 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3971 cpu_fpr[DFPREG(rs2)]);
3972 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
3973 cpu_fpr[DFPREG(rs2) + 1]);
3974 break;
3975 case 0x063: /* VIS I fnors */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]); /* result to rd, not cpu_tmp32 */
3978 break;
3979 case 0x064: /* VIS I fandnot2 */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3982 cpu_fpr[DFPREG(rs2)]);
3983 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3984 cpu_fpr[DFPREG(rs1) + 1],
3985 cpu_fpr[DFPREG(rs2) + 1]);
3986 break;
3987 case 0x065: /* VIS I fandnot2s */
3988 CHECK_FPU_FEATURE(dc, VIS1);
3989 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3990 break;
3991 case 0x066: /* VIS I fnot2 */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3994 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3995 cpu_fpr[DFPREG(rs2) + 1]);
3996 break;
3997 case 0x067: /* VIS I fnot2s */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4000 break;
4001 case 0x068: /* VIS I fandnot1 */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4004 cpu_fpr[DFPREG(rs1)]);
4005 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4006 cpu_fpr[DFPREG(rs2) + 1],
4007 cpu_fpr[DFPREG(rs1) + 1]);
4008 break;
4009 case 0x069: /* VIS I fandnot1s */
4010 CHECK_FPU_FEATURE(dc, VIS1);
4011 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4012 break;
4013 case 0x06a: /* VIS I fnot1 */
4014 CHECK_FPU_FEATURE(dc, VIS1);
4015 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4016 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4017 cpu_fpr[DFPREG(rs1) + 1]);
4018 break;
4019 case 0x06b: /* VIS I fnot1s */
4020 CHECK_FPU_FEATURE(dc, VIS1);
4021 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4022 break;
4023 case 0x06c: /* VIS I fxor */
4024 CHECK_FPU_FEATURE(dc, VIS1);
4025 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4026 cpu_fpr[DFPREG(rs2)]);
4027 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4028 cpu_fpr[DFPREG(rs1) + 1],
4029 cpu_fpr[DFPREG(rs2) + 1]);
4030 break;
4031 case 0x06d: /* VIS I fxors */
4032 CHECK_FPU_FEATURE(dc, VIS1);
4033 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4034 break;
4035 case 0x06e: /* VIS I fnand */
4036 CHECK_FPU_FEATURE(dc, VIS1);
/* results go to the destination registers; storing into cpu_tmp32 discarded them */
4037 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4038 cpu_fpr[DFPREG(rs2)]);
4039 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4040 cpu_fpr[DFPREG(rs2) + 1]);
4041 break;
4042 case 0x06f: /* VIS I fnands */
4043 CHECK_FPU_FEATURE(dc, VIS1);
4044 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]); /* result to rd, not cpu_tmp32 */
4045 break;
4046 case 0x070: /* VIS I fand */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4049 cpu_fpr[DFPREG(rs2)]);
4050 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4051 cpu_fpr[DFPREG(rs1) + 1],
4052 cpu_fpr[DFPREG(rs2) + 1]);
4053 break;
4054 case 0x071: /* VIS I fands */
4055 CHECK_FPU_FEATURE(dc, VIS1);
4056 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4057 break;
4058 case 0x072: /* VIS I fxnor */
4059 CHECK_FPU_FEATURE(dc, VIS1);
4060 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4061 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4062 cpu_fpr[DFPREG(rs1)]);
4063 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4064 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4065 cpu_fpr[DFPREG(rs1) + 1]);
4066 break;
4067 case 0x073: /* VIS I fxnors */
4068 CHECK_FPU_FEATURE(dc, VIS1);
4069 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4070 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4071 break;
4072 case 0x074: /* VIS I fsrc1 */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4075 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4076 cpu_fpr[DFPREG(rs1) + 1]);
4077 break;
4078 case 0x075: /* VIS I fsrc1s */
4079 CHECK_FPU_FEATURE(dc, VIS1);
4080 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4081 break;
4082 case 0x076: /* VIS I fornot2 */
4083 CHECK_FPU_FEATURE(dc, VIS1);
4084 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4085 cpu_fpr[DFPREG(rs2)]);
4086 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4087 cpu_fpr[DFPREG(rs1) + 1],
4088 cpu_fpr[DFPREG(rs2) + 1]);
4089 break;
4090 case 0x077: /* VIS I fornot2s */
4091 CHECK_FPU_FEATURE(dc, VIS1);
4092 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4093 break;
4094 case 0x078: /* VIS I fsrc2 */
4095 CHECK_FPU_FEATURE(dc, VIS1);
4096 gen_op_load_fpr_DT0(DFPREG(rs2));
4097 gen_op_store_DT0_fpr(DFPREG(rd));
4098 break;
4099 case 0x079: /* VIS I fsrc2s */
4100 CHECK_FPU_FEATURE(dc, VIS1);
4101 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4102 break;
4103 case 0x07a: /* VIS I fornot1 */
4104 CHECK_FPU_FEATURE(dc, VIS1);
4105 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4106 cpu_fpr[DFPREG(rs1)]);
4107 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4108 cpu_fpr[DFPREG(rs2) + 1],
4109 cpu_fpr[DFPREG(rs1) + 1]);
4110 break;
4111 case 0x07b: /* VIS I fornot1s */
4112 CHECK_FPU_FEATURE(dc, VIS1);
4113 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4114 break;
4115 case 0x07c: /* VIS I for */
4116 CHECK_FPU_FEATURE(dc, VIS1);
4117 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4118 cpu_fpr[DFPREG(rs2)]);
4119 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4120 cpu_fpr[DFPREG(rs1) + 1],
4121 cpu_fpr[DFPREG(rs2) + 1]);
4122 break;
4123 case 0x07d: /* VIS I fors */
4124 CHECK_FPU_FEATURE(dc, VIS1);
4125 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4126 break;
4127 case 0x07e: /* VIS I fone */
4128 CHECK_FPU_FEATURE(dc, VIS1);
4129 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4130 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4131 break;
4132 case 0x07f: /* VIS I fones */
4133 CHECK_FPU_FEATURE(dc, VIS1);
4134 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4135 break;
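/* The VIS logical ops treat FP registers as raw bit patterns; the
   64-bit forms are emitted as two independent 32-bit operations on
   the even/odd register halves. */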
4136 case 0x080: /* VIS I shutdown */
4137 case 0x081: /* VIS II siam */
4138 // XXX
4139 goto illegal_insn;
4140 default:
4141 goto illegal_insn;
4142 }
4143 #else
4144 goto ncp_insn;
4145 #endif
4146 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4147 #ifdef TARGET_SPARC64
4148 goto illegal_insn;
4149 #else
4150 goto ncp_insn;
4151 #endif
4152 #ifdef TARGET_SPARC64
4153 } else if (xop == 0x39) { /* V9 return */
4154 TCGv_i32 r_const;
4156 save_state(dc, cpu_cond);
4157 cpu_src1 = get_src1(insn, cpu_src1);
4158 if (IS_IMM) { /* immediate */
4159 simm = GET_FIELDs(insn, 19, 31);
4160 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4161 } else { /* register */
4162 rs2 = GET_FIELD(insn, 27, 31);
4163 if (rs2) {
4164 gen_movl_reg_TN(rs2, cpu_src2);
4165 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4166 } else
4167 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4168 }
4169 gen_helper_restore();
4170 gen_mov_pc_npc(dc, cpu_cond);
4171 r_const = tcg_const_i32(3);
4172 gen_helper_check_align(cpu_dst, r_const);
4173 tcg_temp_free_i32(r_const);
4174 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4175 dc->npc = DYNAMIC_PC;
4176 goto jmp_insn;
4177 #endif
4178 } else {
4179 cpu_src1 = get_src1(insn, cpu_src1);
4180 if (IS_IMM) { /* immediate */
4181 simm = GET_FIELDs(insn, 19, 31);
4182 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4183 } else { /* register */
4184 rs2 = GET_FIELD(insn, 27, 31);
4185 if (rs2) {
4186 gen_movl_reg_TN(rs2, cpu_src2);
4187 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4188 } else
4189 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4190 }
4191 switch (xop) {
4192 case 0x38: /* jmpl */
4193 {
4194 TCGv r_pc;
4195 TCGv_i32 r_const;
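/* JMPL: rd receives the address of the jmpl instruction itself, and
   control transfers to the computed target, checked for 4-byte
   alignment; e.g. ret is the assembler synthetic jmpl %i7 + 8, %g0. */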
4197 r_pc = tcg_const_tl(dc->pc);
4198 gen_movl_TN_reg(rd, r_pc);
4199 tcg_temp_free(r_pc);
4200 gen_mov_pc_npc(dc, cpu_cond);
4201 r_const = tcg_const_i32(3);
4202 gen_helper_check_align(cpu_dst, r_const);
4203 tcg_temp_free_i32(r_const);
4204 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4205 dc->npc = DYNAMIC_PC;
4206 }
4207 goto jmp_insn;
4208 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4209 case 0x39: /* rett, V9 return */
4210 {
4211 TCGv_i32 r_const;
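/* RETT is privileged: it returns from a trap handler, and
   gen_helper_rett performs the PSR update (re-enabling traps) after
   the delayed transfer target has been checked for 4-byte alignment. */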
4213 if (!supervisor(dc))
4214 goto priv_insn;
4215 gen_mov_pc_npc(dc, cpu_cond);
4216 r_const = tcg_const_i32(3);
4217 gen_helper_check_align(cpu_dst, r_const);
4218 tcg_temp_free_i32(r_const);
4219 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4220 dc->npc = DYNAMIC_PC;
4221 gen_helper_rett();
4222 }
4223 goto jmp_insn;
4224 #endif
4225 case 0x3b: /* flush */
4226 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4227 goto unimp_flush;
4228 /* nop */
4229 break;
4230 case 0x3c: /* save */
4231 save_state(dc, cpu_cond);
4232 gen_helper_save();
4233 gen_movl_TN_reg(rd, cpu_dst);
4234 break;
4235 case 0x3d: /* restore */
4236 save_state(dc, cpu_cond);
4237 gen_helper_restore();
4238 gen_movl_TN_reg(rd, cpu_dst);
4239 break;
4240 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4241 case 0x3e: /* V9 done/retry */
4242 {
4243 switch (rd) {
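/* rd selects the operation: 0 = done, 1 = retry. Both are
   privileged, reload pc/npc from the trap state, and therefore
   end the TB with a dynamic pc/npc. */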
4244 case 0:
4245 if (!supervisor(dc))
4246 goto priv_insn;
4247 dc->npc = DYNAMIC_PC;
4248 dc->pc = DYNAMIC_PC;
4249 gen_helper_done();
4250 goto jmp_insn;
4251 case 1:
4252 if (!supervisor(dc))
4253 goto priv_insn;
4254 dc->npc = DYNAMIC_PC;
4255 dc->pc = DYNAMIC_PC;
4256 gen_helper_retry();
4257 goto jmp_insn;
4258 default:
4259 goto illegal_insn;
4260 }
4261 }
4262 break;
4263 #endif
4264 default:
4265 goto illegal_insn;
4266 }
4267 }
4268 break;
4269 }
4270 break;
4271 case 3: /* load/store instructions */
4272 {
4273 unsigned int xop = GET_FIELD(insn, 7, 12);
4275 /* flush pending conditional evaluations before exposing
4276 cpu state */
4277 if (dc->cc_op != CC_OP_FLAGS) {
4278 dc->cc_op = CC_OP_FLAGS;
4279 gen_helper_compute_psr();
4280 }
4281 cpu_src1 = get_src1(insn, cpu_src1);
4282 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4283 rs2 = GET_FIELD(insn, 27, 31);
4284 gen_movl_reg_TN(rs2, cpu_src2);
4285 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4286 } else if (IS_IMM) { /* immediate */
4287 simm = GET_FIELDs(insn, 19, 31);
4288 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4289 } else { /* register */
4290 rs2 = GET_FIELD(insn, 27, 31);
4291 if (rs2 != 0) {
4292 gen_movl_reg_TN(rs2, cpu_src2);
4293 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4294 } else
4295 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4296 }
4297 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4298 (xop > 0x17 && xop <= 0x1d ) ||
4299 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
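/* This range covers the integer loads: ld/ldub/lduh/ldd and their
   signed variants, ldstub and swap, the alternate-space (ASI) forms,
   and the V9 ldsw/ldx/prefetch cases. */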
4300 switch (xop) {
4301 case 0x0: /* ld, V9 lduw, load unsigned word */
4302 gen_address_mask(dc, cpu_addr);
4303 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4304 break;
4305 case 0x1: /* ldub, load unsigned byte */
4306 gen_address_mask(dc, cpu_addr);
4307 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4308 break;
4309 case 0x2: /* lduh, load unsigned halfword */
4310 gen_address_mask(dc, cpu_addr);
4311 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4312 break;
4313 case 0x3: /* ldd, load double word */
4314 if (rd & 1)
4315 goto illegal_insn;
4316 else {
4317 TCGv_i32 r_const;
4319 save_state(dc, cpu_cond);
4320 r_const = tcg_const_i32(7);
4321 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4322 tcg_temp_free_i32(r_const);
4323 gen_address_mask(dc, cpu_addr);
4324 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
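/* ldd: split the 64-bit value; the low word goes to the odd register
   rd + 1, the high word stays in cpu_val and is later written to the
   even register rd. E.g. ldd [%o0], %o2 fills %o2 (high) and %o3 (low). */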
4325 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4326 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4327 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4328 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4329 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4330 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4331 }
4332 break;
4333 case 0x9: /* ldsb, load signed byte */
4334 gen_address_mask(dc, cpu_addr);
4335 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4336 break;
4337 case 0xa: /* ldsh, load signed halfword */
4338 gen_address_mask(dc, cpu_addr);
4339 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4340 break;
4341 case 0xd: /* ldstub -- XXX: should be atomic */
4342 {
4343 TCGv r_const;
4345 gen_address_mask(dc, cpu_addr);
4346 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4347 r_const = tcg_const_tl(0xff);
4348 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4349 tcg_temp_free(r_const);
4350 }
4351 break;
4352 case 0x0f: /* swap, swap register with memory. Also
4353 atomic */
4354 CHECK_IU_FEATURE(dc, SWAP);
4355 gen_movl_reg_TN(rd, cpu_val);
4356 gen_address_mask(dc, cpu_addr);
4357 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4358 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4359 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4360 break;
4361 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
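/* Alternate-space loads: on sparc32 these are privileged and have no
   immediate form, hence the IS_IMM and supervisor checks below; on
   sparc64 user code may also use them via unprivileged ASIs. */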
4362 case 0x10: /* lda, V9 lduwa, load word alternate */
4363 #ifndef TARGET_SPARC64
4364 if (IS_IMM)
4365 goto illegal_insn;
4366 if (!supervisor(dc))
4367 goto priv_insn;
4368 #endif
4369 save_state(dc, cpu_cond);
4370 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4371 break;
4372 case 0x11: /* lduba, load unsigned byte alternate */
4373 #ifndef TARGET_SPARC64
4374 if (IS_IMM)
4375 goto illegal_insn;
4376 if (!supervisor(dc))
4377 goto priv_insn;
4378 #endif
4379 save_state(dc, cpu_cond);
4380 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4381 break;
4382 case 0x12: /* lduha, load unsigned halfword alternate */
4383 #ifndef TARGET_SPARC64
4384 if (IS_IMM)
4385 goto illegal_insn;
4386 if (!supervisor(dc))
4387 goto priv_insn;
4388 #endif
4389 save_state(dc, cpu_cond);
4390 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4391 break;
4392 case 0x13: /* ldda, load double word alternate */
4393 #ifndef TARGET_SPARC64
4394 if (IS_IMM)
4395 goto illegal_insn;
4396 if (!supervisor(dc))
4397 goto priv_insn;
4398 #endif
4399 if (rd & 1)
4400 goto illegal_insn;
4401 save_state(dc, cpu_cond);
4402 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4403 goto skip_move;
4404 case 0x19: /* ldsba, load signed byte alternate */
4405 #ifndef TARGET_SPARC64
4406 if (IS_IMM)
4407 goto illegal_insn;
4408 if (!supervisor(dc))
4409 goto priv_insn;
4410 #endif
4411 save_state(dc, cpu_cond);
4412 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4413 break;
4414 case 0x1a: /* ldsha, load signed halfword alternate */
4415 #ifndef TARGET_SPARC64
4416 if (IS_IMM)
4417 goto illegal_insn;
4418 if (!supervisor(dc))
4419 goto priv_insn;
4420 #endif
4421 save_state(dc, cpu_cond);
4422 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4423 break;
4424 case 0x1d: /* ldstuba -- XXX: should be atomic */
4425 #ifndef TARGET_SPARC64
4426 if (IS_IMM)
4427 goto illegal_insn;
4428 if (!supervisor(dc))
4429 goto priv_insn;
4430 #endif
4431 save_state(dc, cpu_cond);
4432 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4433 break;
4434 case 0x1f: /* swapa, swap reg with alt. memory. Also
4435 atomic */
4436 CHECK_IU_FEATURE(dc, SWAP);
4437 #ifndef TARGET_SPARC64
4438 if (IS_IMM)
4439 goto illegal_insn;
4440 if (!supervisor(dc))
4441 goto priv_insn;
4442 #endif
4443 save_state(dc, cpu_cond);
4444 gen_movl_reg_TN(rd, cpu_val);
4445 gen_swap_asi(cpu_val, cpu_addr, insn);
4446 break;
4448 #ifndef TARGET_SPARC64
4449 case 0x30: /* ldc */
4450 case 0x31: /* ldcsr */
4451 case 0x33: /* lddc */
4452 goto ncp_insn;
4453 #endif
4454 #endif
4455 #ifdef TARGET_SPARC64
4456 case 0x08: /* V9 ldsw */
4457 gen_address_mask(dc, cpu_addr);
4458 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4459 break;
4460 case 0x0b: /* V9 ldx */
4461 gen_address_mask(dc, cpu_addr);
4462 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4463 break;
4464 case 0x18: /* V9 ldswa */
4465 save_state(dc, cpu_cond);
4466 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4467 break;
4468 case 0x1b: /* V9 ldxa */
4469 save_state(dc, cpu_cond);
4470 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4471 break;
4472 case 0x2d: /* V9 prefetch, no effect */
4473 goto skip_move;
4474 case 0x30: /* V9 ldfa */
4475 save_state(dc, cpu_cond);
4476 gen_ldf_asi(cpu_addr, insn, 4, rd);
4477 goto skip_move;
4478 case 0x33: /* V9 lddfa */
4479 save_state(dc, cpu_cond);
4480 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4481 goto skip_move;
4482 case 0x3d: /* V9 prefetcha, no effect */
4483 goto skip_move;
4484 case 0x32: /* V9 ldqfa */
4485 CHECK_FPU_FEATURE(dc, FLOAT128);
4486 save_state(dc, cpu_cond);
4487 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4488 goto skip_move;
4489 #endif
4490 default:
4491 goto illegal_insn;
4492 }
4493 gen_movl_TN_reg(rd, cpu_val);
4494 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4495 skip_move: ;
4496 #endif
4497 } else if (xop >= 0x20 && xop < 0x24) {
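/* xop 0x20-0x23: floating-point loads (ldf, ldfsr/ldxfsr, ldqf,
   lddf); these trap first if the FPU is disabled. */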
4498 if (gen_trap_ifnofpu(dc, cpu_cond))
4499 goto jmp_insn;
4500 save_state(dc, cpu_cond);
4501 switch (xop) {
4502 case 0x20: /* ldf, load fpreg */
4503 gen_address_mask(dc, cpu_addr);
4504 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4505 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4506 break;
4507 case 0x21: /* ldfsr, V9 ldxfsr */
4508 #ifdef TARGET_SPARC64
4509 gen_address_mask(dc, cpu_addr);
4510 if (rd == 1) {
4511 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4512 gen_helper_ldxfsr(cpu_tmp64);
4513 } else {
4514 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4515 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4516 gen_helper_ldfsr(cpu_tmp32);
4517 }
4518 #else
4519 {
4520 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4521 gen_helper_ldfsr(cpu_tmp32);
4522 }
4523 #endif
4524 break;
4525 case 0x22: /* ldqf, load quad fpreg */
4526 {
4527 TCGv_i32 r_const;
4529 CHECK_FPU_FEATURE(dc, FLOAT128);
4530 r_const = tcg_const_i32(dc->mem_idx);
4531 gen_address_mask(dc, cpu_addr);
4532 gen_helper_ldqf(cpu_addr, r_const);
4533 tcg_temp_free_i32(r_const);
4534 gen_op_store_QT0_fpr(QFPREG(rd));
4535 }
4536 break;
4537 case 0x23: /* lddf, load double fpreg */
4538 {
4539 TCGv_i32 r_const;
4541 r_const = tcg_const_i32(dc->mem_idx);
4542 gen_address_mask(dc, cpu_addr);
4543 gen_helper_lddf(cpu_addr, r_const);
4544 tcg_temp_free_i32(r_const);
4545 gen_op_store_DT0_fpr(DFPREG(rd));
4546 }
4547 break;
4548 default:
4549 goto illegal_insn;
4550 }
4551 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4552 xop == 0xe || xop == 0x1e) {
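/* xop 4-7: integer stores (st, stb, sth, std); 0x14-0x17 are their
   alternate-space forms, 0x0e/0x1e the V9 stx/stxa. */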
4553 gen_movl_reg_TN(rd, cpu_val);
4554 switch (xop) {
4555 case 0x4: /* st, store word */
4556 gen_address_mask(dc, cpu_addr);
4557 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4558 break;
4559 case 0x5: /* stb, store byte */
4560 gen_address_mask(dc, cpu_addr);
4561 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4562 break;
4563 case 0x6: /* sth, store halfword */
4564 gen_address_mask(dc, cpu_addr);
4565 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4566 break;
4567 case 0x7: /* std, store double word */
4568 if (rd & 1)
4569 goto illegal_insn;
4570 else {
4571 TCGv_i32 r_const;
4573 save_state(dc, cpu_cond);
4574 gen_address_mask(dc, cpu_addr);
4575 r_const = tcg_const_i32(7);
4576 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4577 tcg_temp_free_i32(r_const);
4578 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4579 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4580 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4581 }
4582 break;
4583 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4584 case 0x14: /* sta, V9 stwa, store word alternate */
4585 #ifndef TARGET_SPARC64
4586 if (IS_IMM)
4587 goto illegal_insn;
4588 if (!supervisor(dc))
4589 goto priv_insn;
4590 #endif
4591 save_state(dc, cpu_cond);
4592 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4593 dc->npc = DYNAMIC_PC;
4594 break;
4595 case 0x15: /* stba, store byte alternate */
4596 #ifndef TARGET_SPARC64
4597 if (IS_IMM)
4598 goto illegal_insn;
4599 if (!supervisor(dc))
4600 goto priv_insn;
4601 #endif
4602 save_state(dc, cpu_cond);
4603 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4604 dc->npc = DYNAMIC_PC;
4605 break;
4606 case 0x16: /* stha, store halfword alternate */
4607 #ifndef TARGET_SPARC64
4608 if (IS_IMM)
4609 goto illegal_insn;
4610 if (!supervisor(dc))
4611 goto priv_insn;
4612 #endif
4613 save_state(dc, cpu_cond);
4614 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4615 dc->npc = DYNAMIC_PC;
4616 break;
4617 case 0x17: /* stda, store double word alternate */
4618 #ifndef TARGET_SPARC64
4619 if (IS_IMM)
4620 goto illegal_insn;
4621 if (!supervisor(dc))
4622 goto priv_insn;
4623 #endif
4624 if (rd & 1)
4625 goto illegal_insn;
4626 else {
4627 save_state(dc, cpu_cond);
4628 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4629 }
4630 break;
4631 #endif
4632 #ifdef TARGET_SPARC64
4633 case 0x0e: /* V9 stx */
4634 gen_address_mask(dc, cpu_addr);
4635 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4636 break;
4637 case 0x1e: /* V9 stxa */
4638 save_state(dc, cpu_cond);
4639 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4640 dc->npc = DYNAMIC_PC;
4641 break;
4642 #endif
4643 default:
4644 goto illegal_insn;
4645 }
4646 } else if (xop > 0x23 && xop < 0x28) {
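/* xop 0x24-0x27: floating-point stores (stf, stfsr/stxfsr,
   stqf/stdfq, stdf). */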
4647 if (gen_trap_ifnofpu(dc, cpu_cond))
4648 goto jmp_insn;
4649 save_state(dc, cpu_cond);
4650 switch (xop) {
4651 case 0x24: /* stf, store fpreg */
4652 gen_address_mask(dc, cpu_addr);
4653 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4654 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4655 break;
4656 case 0x25: /* stfsr, V9 stxfsr */
4657 #ifdef TARGET_SPARC64
4658 gen_address_mask(dc, cpu_addr);
4659 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4660 if (rd == 1)
4661 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4662 else
4663 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4664 #else
4665 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4666 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4667 #endif
4668 break;
4669 case 0x26:
4670 #ifdef TARGET_SPARC64
4671 /* V9 stqf, store quad fpreg */
4672 {
4673 TCGv_i32 r_const;
4675 CHECK_FPU_FEATURE(dc, FLOAT128);
4676 gen_op_load_fpr_QT0(QFPREG(rd));
4677 r_const = tcg_const_i32(dc->mem_idx);
4678 gen_address_mask(dc, cpu_addr);
4679 gen_helper_stqf(cpu_addr, r_const);
4680 tcg_temp_free_i32(r_const);
4681 }
4682 break;
4683 #else /* !TARGET_SPARC64 */
4684 /* stdfq, store floating point queue */
4685 #if defined(CONFIG_USER_ONLY)
4686 goto illegal_insn;
4687 #else
4688 if (!supervisor(dc))
4689 goto priv_insn;
4690 if (gen_trap_ifnofpu(dc, cpu_cond))
4691 goto jmp_insn;
4692 goto nfq_insn;
4693 #endif
4694 #endif
4695 case 0x27: /* stdf, store double fpreg */
4696 {
4697 TCGv_i32 r_const;
4699 gen_op_load_fpr_DT0(DFPREG(rd));
4700 r_const = tcg_const_i32(dc->mem_idx);
4701 gen_address_mask(dc, cpu_addr);
4702 gen_helper_stdf(cpu_addr, r_const);
4703 tcg_temp_free_i32(r_const);
4704 }
4705 break;
4706 default:
4707 goto illegal_insn;
4708 }
4709 } else if (xop > 0x33 && xop < 0x3f) {
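/* xop 0x34-0x3e: V9 alternate-space FP stores and the casa/casxa
   compare-and-swap forms; on sparc32 this range maps to coprocessor
   stores, which raise the ncp exception. */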
4710 save_state(dc, cpu_cond);
4711 switch (xop) {
4712 #ifdef TARGET_SPARC64
4713 case 0x34: /* V9 stfa */
4714 gen_stf_asi(cpu_addr, insn, 4, rd);
4715 break;
4716 case 0x36: /* V9 stqfa */
4717 {
4718 TCGv_i32 r_const;
4720 CHECK_FPU_FEATURE(dc, FLOAT128);
4721 r_const = tcg_const_i32(7);
4722 gen_helper_check_align(cpu_addr, r_const);
4723 tcg_temp_free_i32(r_const);
4724 gen_op_load_fpr_QT0(QFPREG(rd));
4725 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4726 }
4727 break;
4728 case 0x37: /* V9 stdfa */
4729 gen_op_load_fpr_DT0(DFPREG(rd));
4730 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4731 break;
4732 case 0x3c: /* V9 casa */
4733 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4734 gen_movl_TN_reg(rd, cpu_val);
4735 break;
4736 case 0x3e: /* V9 casxa */
4737 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4738 gen_movl_TN_reg(rd, cpu_val);
4739 break;
4740 #else
4741 case 0x34: /* stc */
4742 case 0x35: /* stcsr */
4743 case 0x36: /* stdcq */
4744 case 0x37: /* stdc */
4745 goto ncp_insn;
4746 #endif
4747 default:
4748 goto illegal_insn;
4749 }
4750 } else
4751 goto illegal_insn;
4752 }
4753 break;
4754 }
4755 /* default case for non jump instructions */
4756 if (dc->npc == DYNAMIC_PC) {
4757 dc->pc = DYNAMIC_PC;
4758 gen_op_next_insn();
4759 } else if (dc->npc == JUMP_PC) {
4760 /* we can do a static jump */
4761 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4762 dc->is_br = 1;
4763 } else {
4764 dc->pc = dc->npc;
4765 dc->npc = dc->npc + 4;
4766 }
4767 jmp_insn:
4768 goto egress;
4769 illegal_insn:
4770 {
4771 TCGv_i32 r_const;
4773 save_state(dc, cpu_cond);
4774 r_const = tcg_const_i32(TT_ILL_INSN);
4775 gen_helper_raise_exception(r_const);
4776 tcg_temp_free_i32(r_const);
4777 dc->is_br = 1;
4778 }
4779 goto egress;
4780 unimp_flush:
4781 {
4782 TCGv_i32 r_const;
4784 save_state(dc, cpu_cond);
4785 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4786 gen_helper_raise_exception(r_const);
4787 tcg_temp_free_i32(r_const);
4788 dc->is_br = 1;
4789 }
4790 goto egress;
4791 #if !defined(CONFIG_USER_ONLY)
4792 priv_insn:
4793 {
4794 TCGv_i32 r_const;
4796 save_state(dc, cpu_cond);
4797 r_const = tcg_const_i32(TT_PRIV_INSN);
4798 gen_helper_raise_exception(r_const);
4799 tcg_temp_free_i32(r_const);
4800 dc->is_br = 1;
4801 }
4802 goto egress;
4803 #endif
4804 nfpu_insn:
4805 save_state(dc, cpu_cond);
4806 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4807 dc->is_br = 1;
4808 goto egress;
4809 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4810 nfq_insn:
4811 save_state(dc, cpu_cond);
4812 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4813 dc->is_br = 1;
4814 goto egress;
4815 #endif
4816 #ifndef TARGET_SPARC64
4817 ncp_insn:
4818 {
4819 TCGv_i32 r_const;
4821 save_state(dc, cpu_cond);
4822 r_const = tcg_const_i32(TT_NCP_INSN);
4823 gen_helper_raise_exception(r_const);
4824 tcg_temp_free_i32(r_const);
4825 dc->is_br = 1;
4826 }
4827 goto egress;
4828 #endif
4829 egress:
4830 tcg_temp_free(cpu_tmp1);
4831 tcg_temp_free(cpu_tmp2);
4832 }
4834 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4835 int spc, CPUSPARCState *env)
4836 {
4837 target_ulong pc_start, last_pc;
4838 uint16_t *gen_opc_end;
4839 DisasContext dc1, *dc = &dc1;
4840 CPUBreakpoint *bp;
4841 int j, lj = -1;
4842 int num_insns;
4843 int max_insns;
4845 memset(dc, 0, sizeof(DisasContext));
4846 dc->tb = tb;
4847 pc_start = tb->pc;
4848 dc->pc = pc_start;
4849 last_pc = dc->pc;
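/* For SPARC, tb->cs_base carries the npc that pairs with tb->pc (see
   cpu_get_tb_cpu_state), so delay-slot state is part of the TB key. */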
4850 dc->npc = (target_ulong) tb->cs_base;
4851 dc->cc_op = CC_OP_DYNAMIC;
4852 dc->mem_idx = cpu_mmu_index(env);
4853 dc->def = env->def;
4854 if ((dc->def->features & CPU_FEATURE_FLOAT))
4855 dc->fpu_enabled = cpu_fpu_enabled(env);
4856 else
4857 dc->fpu_enabled = 0;
4858 #ifdef TARGET_SPARC64
4859 dc->address_mask_32bit = env->pstate & PS_AM;
4860 #endif
4861 dc->singlestep = (env->singlestep_enabled || singlestep);
4862 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4864 cpu_tmp0 = tcg_temp_new();
4865 cpu_tmp32 = tcg_temp_new_i32();
4866 cpu_tmp64 = tcg_temp_new_i64();
4868 cpu_dst = tcg_temp_local_new();
4870 // loads and stores
4871 cpu_val = tcg_temp_local_new();
4872 cpu_addr = tcg_temp_local_new();
4874 num_insns = 0;
4875 max_insns = tb->cflags & CF_COUNT_MASK;
4876 if (max_insns == 0)
4877 max_insns = CF_COUNT_MASK;
4878 gen_icount_start();
4879 do {
4880 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
4881 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
4882 if (bp->pc == dc->pc) {
4883 if (dc->pc != pc_start)
4884 save_state(dc, cpu_cond);
4885 gen_helper_debug();
4886 tcg_gen_exit_tb(0);
4887 dc->is_br = 1;
4888 goto exit_gen_loop;
4889 }
4890 }
4891 }
4892 if (spc) {
4893 qemu_log("Search PC...\n");
4894 j = gen_opc_ptr - gen_opc_buf;
4895 if (lj < j) {
4896 lj++;
4897 while (lj < j)
4898 gen_opc_instr_start[lj++] = 0;
4899 gen_opc_pc[lj] = dc->pc;
4900 gen_opc_npc[lj] = dc->npc;
4901 gen_opc_instr_start[lj] = 1;
4902 gen_opc_icount[lj] = num_insns;
4903 }
4904 }
4905 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4906 gen_io_start();
4907 last_pc = dc->pc;
4908 disas_sparc_insn(dc);
4909 num_insns++;
4911 if (dc->is_br)
4912 break;
4913 /* if the next PC is different, we abort now */
4914 if (dc->pc != (last_pc + 4))
4915 break;
4916 /* if we reach a page boundary, we stop generation so that the
4917 PC of a TT_TFAULT exception is always in the right page */
4918 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4919 break;
4920 /* in single-step mode, we generate only one instruction and
4921 generate an exception */
4922 if (dc->singlestep) {
4923 break;
4924 }
4925 } while ((gen_opc_ptr < gen_opc_end) &&
4926 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4927 num_insns < max_insns);
4929 exit_gen_loop:
4930 tcg_temp_free(cpu_addr);
4931 tcg_temp_free(cpu_val);
4932 tcg_temp_free(cpu_dst);
4933 tcg_temp_free_i64(cpu_tmp64);
4934 tcg_temp_free_i32(cpu_tmp32);
4935 tcg_temp_free(cpu_tmp0);
4936 if (tb->cflags & CF_LAST_IO)
4937 gen_io_end();
4938 if (!dc->is_br) {
4939 if (dc->pc != DYNAMIC_PC &&
4940 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4941 /* static PC and NPC: we can use direct chaining */
4942 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4943 } else {
4944 if (dc->pc != DYNAMIC_PC)
4945 tcg_gen_movi_tl(cpu_pc, dc->pc);
4946 save_npc(dc, cpu_cond);
4947 tcg_gen_exit_tb(0);
4948 }
4949 }
4950 gen_icount_end(tb, num_insns);
4951 *gen_opc_ptr = INDEX_op_end;
4952 if (spc) {
4953 j = gen_opc_ptr - gen_opc_buf;
4954 lj++;
4955 while (lj <= j)
4956 gen_opc_instr_start[lj++] = 0;
4957 #if 0
4958 log_page_dump();
4959 #endif
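/* Record the two possible branch targets so restore_state_to_opc()
   can resolve a JUMP_PC npc from the run-time condition. */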
4960 gen_opc_jump_pc[0] = dc->jump_pc[0];
4961 gen_opc_jump_pc[1] = dc->jump_pc[1];
4962 } else {
4963 tb->size = last_pc + 4 - pc_start;
4964 tb->icount = num_insns;
4965 }
4966 #ifdef DEBUG_DISAS
4967 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4968 qemu_log("--------------\n");
4969 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4970 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4971 qemu_log("\n");
4972 }
4973 #endif
4974 }
4976 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4977 {
4978 gen_intermediate_code_internal(tb, 0, env);
4979 }
4981 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4982 {
4983 gen_intermediate_code_internal(tb, 1, env);
4984 }
4986 void gen_intermediate_code_init(CPUSPARCState *env)
4987 {
4988 unsigned int i;
4989 static int inited;
4990 static const char * const gregnames[8] = {
4991 NULL, // g0 not used
4992 "g1",
4993 "g2",
4994 "g3",
4995 "g4",
4996 "g5",
4997 "g6",
4998 "g7",
5000 static const char * const fregnames[64] = {
5001 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5002 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5003 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5004 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5005 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5006 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5007 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5008 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5011 /* init various static tables */
5012 if (!inited) {
5013 inited = 1;
5015 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5016 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5017 offsetof(CPUState, regwptr),
5018 "regwptr");
5019 #ifdef TARGET_SPARC64
5020 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5021 "xcc");
5022 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5023 "asi");
5024 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5025 "fprs");
5026 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5027 "gsr");
5028 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5029 offsetof(CPUState, tick_cmpr),
5030 "tick_cmpr");
5031 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5032 offsetof(CPUState, stick_cmpr),
5033 "stick_cmpr");
5034 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5035 offsetof(CPUState, hstick_cmpr),
5036 "hstick_cmpr");
5037 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5038 "hintp");
5039 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5040 "htba");
5041 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5042 "hver");
5043 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5044 offsetof(CPUState, ssr), "ssr");
5045 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5046 offsetof(CPUState, version), "ver");
5047 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5048 offsetof(CPUState, softint),
5049 "softint");
5050 #else
5051 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5052 "wim");
5053 #endif
5054 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5055 "cond");
5056 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5057 "cc_src");
5058 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5059 offsetof(CPUState, cc_src2),
5060 "cc_src2");
5061 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5062 "cc_dst");
5063 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5064 "cc_op");
5065 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5066 "psr");
5067 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5068 "fsr");
5069 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5070 "pc");
5071 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5072 "npc");
5073 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5074 #ifndef CONFIG_USER_ONLY
5075 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5076 "tbr");
5077 #endif
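/* %g0 is hardwired to zero, so no TCG global is created for it:
   gregnames[0] is NULL and the loop starts at index 1. */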
5078 for (i = 1; i < 8; i++)
5079 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5080 offsetof(CPUState, gregs[i]),
5081 gregnames[i]);
5082 for (i = 0; i < TARGET_FPREGS; i++)
5083 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5084 offsetof(CPUState, fpr[i]),
5085 fregnames[i]);
5087 /* register helpers */
5089 #define GEN_HELPER 2
5090 #include "helper.h"
5091 }
5092 }
5094 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5095 {
5096 target_ulong npc;
5097 env->pc = gen_opc_pc[pc_pos];
5098 npc = gen_opc_npc[pc_pos];
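/* gen_opc_npc[] encodes the npc: 1 (DYNAMIC_PC) means it was already
   stored at run time, 2 (JUMP_PC) means it depends on the recorded
   condition and jump targets. */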
5099 if (npc == 1) {
5100 /* dynamic NPC: already stored */
5101 } else if (npc == 2) {
5102 /* jump PC: use 'cond' and the jump targets of the translation */
5103 if (env->cond) {
5104 env->npc = gen_opc_jump_pc[0];
5105 } else {
5106 env->npc = gen_opc_jump_pc[1];
5107 }
5108 } else {
5109 env->npc = npc;
5110 }
5112 /* flush pending conditional evaluations before exposing cpu state */
5113 if (CC_OP != CC_OP_FLAGS) {
5114 helper_compute_psr();
5115 }
5116 }