Optimize cmp x, 0 case
[qemu-kvm/fedora.git] / target-sparc / translate.c
blobae93614cc36f998524af516588b8ac1a9f04f86c
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
/* Per-translation-block disassembly state threaded through the decoder. */
typedef struct DisasContext {
    target_ulong pc;      /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;     /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;            /* nonzero once a branch/trap ends the block */
    int mem_idx;          /* memory access privilege index (see supervisor()) */
    int fpu_enabled;      /* FPU usable; gate for FP instruction decoding */
    int address_mask_32bit; /* sparc64: truncate addresses to 32 bits (AM_CHECK) */
    struct TranslationBlock *tb;
    sparc_def_t *def;     /* CPU model definition -- presumably selects features; confirm at init site */
} DisasContext;
83 // This function uses non-native bit order
84 #define GET_FIELD(X, FROM, TO) \
85 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87 // This function uses the order in the manuals, i.e. bit 0 is 2^0
88 #define GET_FIELD_SP(X, FROM, TO) \
89 GET_FIELD(X, 31 - (TO), 31 - (FROM))
91 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
92 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
94 #ifdef TARGET_SPARC64
95 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
96 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
97 #else
98 #define DFPREG(r) (r & 0x1e)
99 #define QFPREG(r) (r & 0x1c)
100 #endif
102 #define UA2005_HTRAP_MASK 0xff
103 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of x and return the result as an int.
 *
 * The previous implementation computed '(x << len) >> len', which is
 * undefined behavior in C when the left shift moves bits into or past the
 * sign bit (C99 6.5.7p4), and relies on the implementation-defined
 * arithmetic right shift of negative values.  This version is fully
 * portable and produces identical results for 1 <= len <= 32.
 */
static int sign_extend(int x, int len)
{
    uint32_t field, sign;

    if (len <= 0 || len >= 32) {
        return x;   /* shifting by 0 in the old code: value unchanged */
    }
    field = (uint32_t)x & ((1U << len) - 1U);
    sign = 1U << (len - 1);
    /* (field ^ sign) - sign maps values with the top field bit set to
       their negative two's-complement equivalents. */
    return (int)((int64_t)(field ^ sign) - (int64_t)sign);
}
111 #define IS_IMM (insn & (1<<13))
113 /* floating point registers moves */
/* Copy the single-precision pair fpr[src], fpr[src+1] into env->dt0. */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
/* Copy the single-precision pair fpr[src], fpr[src+1] into env->dt1. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}
/* Copy env->dt0 back into the register pair fpr[dst], fpr[dst+1]. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
/* Copy the four-register quad fpr[src..src+3] into env->qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
/* Copy the four-register quad fpr[src..src+3] into env->qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}
/* Copy env->qt0 back into the four registers fpr[dst..dst+3]. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
174 /* moves */
175 #ifdef CONFIG_USER_ONLY
176 #define supervisor(dc) 0
177 #ifdef TARGET_SPARC64
178 #define hypervisor(dc) 0
179 #endif
180 #else
181 #define supervisor(dc) (dc->mem_idx >= 1)
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) (dc->mem_idx == 2)
184 #else
185 #endif
186 #endif
188 #ifdef TARGET_SPARC64
189 #ifndef TARGET_ABI32
190 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
191 #else
192 #define AM_CHECK(dc) (1)
193 #endif
194 #endif
/* On sparc64 with 32-bit address masking enabled, truncate addr to 32 bits;
   a no-op elsewhere. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
/* Load window register 'reg' into tn: %g0 is constant zero, %g1-%g7 live
   in TCG globals, window registers are loaded through cpu_regwptr. */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
/* Store tn into window register 'reg'; writes to %g0 are discarded. */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
/* End the TB by jumping to (pc, npc); uses a chained direct jump when both
   targets stay on the same guest page as this TB, otherwise a full exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
247 // XXX suboptimal
/* Extract the N (negative) flag of PSR-format value src into reg (0 or 1). */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract the Z (zero) flag of PSR-format value src into reg (0 or 1). */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract the V (overflow) flag of PSR-format value src into reg (0 or 1). */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract the C (carry) flag of PSR-format value src into reg (0 or 1). */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Clear all 32-bit integer condition codes (cpu_psr holds only icc here). */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}
#ifdef TARGET_SPARC64
/* Clear all 64-bit (xcc) condition codes. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
/* Set the icc N and Z flags from the low 32 bits of dst; assumes the
   flags were cleared beforehand (only ORs bits in). */
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}
#ifdef TARGET_SPARC64
/* Set the xcc N and Z flags from the full 64-bit dst (OR-in only). */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
/* old op:
   if (T0 < src1)
       env->psr |= PSR_CARRY;
*/
/* Set icc carry after an addition: carry out iff the 32-bit result (dst)
   is unsigned-less-than the 32-bit operand src1. */
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
/* Set xcc carry after a 64-bit addition: carry out iff dst < src1 unsigned. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
/* Set icc overflow after addition: V iff operands have the same sign in
   bit 31 and the result's sign differs.  Clobbers cpu_tmp0/cpu_tmp32. */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}
#ifdef TARGET_SPARC64
/* Set xcc overflow after 64-bit addition, using bit 63 as the sign bit.
   Clobbers cpu_tmp0/cpu_tmp32. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
/* Raise a TT_TOVF trap if the 32-bit signed addition dst = src1 + src2
   overflowed (used by the tagged-add-and-trap instructions). */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
/* Set icc overflow if either operand has nonzero tag bits (low 2 bits),
   as required by tagged add/subtract. */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
/* Update condition codes for a logical result: clear all flags, then set
   N/Z from dst (C and V stay zero for logic ops). */
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
/* Raise a TT_TOVF trap if either operand carries nonzero tag bits
   (low 2 bits) -- used by the trapping tagged arithmetic ops. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
/* Common flag update for ADDcc once cpu_cc_src/src2/dst are populated:
   set N, Z, C and V for both icc and (sparc64) xcc, then write the result. */
static inline void gen_op_add_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* ADDcc with an immediate second operand: dst = src1 + src2, flags set. */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    gen_op_add_cc2(dst);
}
/* ADDcc with a register second operand: dst = src1 + src2, flags set. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_add_cc2(dst);
}
/* Final flag update for ADDXcc.  Unlike gen_op_add_cc2 it does NOT clear
   the flags: the callers already cleared them and set a partial carry
   from the carry-in addition, which must be preserved (ORed with). */
static inline void gen_op_addx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* ADDXcc with immediate: dst = src1 + carry-in + src2.  Carry is computed
   in two steps (after adding carry-in, then after adding src2) so a carry
   from either addition is recorded. */
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_addx_cc2(dst);
}
/* ADDXcc with register operand: dst = src1 + carry-in + src2, with the
   same two-step carry computation as gen_op_addxi_cc. */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_addx_cc2(dst);
}
/* TADDcc: tagged add; like ADDcc but icc.V is additionally set when either
   operand has nonzero tag bits. */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* TADDccTV: tagged add that traps (TT_TOVF) on tag mismatch or signed
   overflow instead of setting icc.V. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* old op:
   if (src1 < T1)
       env->psr |= PSR_CARRY;
*/
/* Set icc carry (borrow) after subtraction: borrow iff the 32-bit src1 is
   unsigned-less-than the 32-bit src2. */
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
/* Set xcc carry (borrow) after 64-bit subtraction: src1 < src2 unsigned. */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
/* Set icc overflow after subtraction: V iff operand signs differ in bit 31
   and the result's sign differs from src1.  Clobbers cpu_tmp0/cpu_tmp32. */
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#ifdef TARGET_SPARC64
/* Set xcc overflow after 64-bit subtraction, using bit 63 as sign bit.
   Clobbers cpu_tmp0/cpu_tmp32. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
/* Raise a TT_TOVF trap if the 32-bit signed subtraction dst = src1 - src2
   overflowed (used by the tagged-subtract-and-trap instructions). */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
/* Common flag update for SUBcc once cpu_cc_src/src2/dst are populated:
   set N, Z, C (borrow) and V for icc and (sparc64) xcc, then write dst. */
static inline void gen_op_sub_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* SUBcc with an immediate second operand.  The src2 == 0 case (i.e.
   "cmp x, 0" / "tst") is optimized: x - 0 == x, so only N/Z matter and the
   cheaper logic-op flag update suffices (it clears C and V, which is
   correct since subtracting zero never borrows or overflows). */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(dst, src1);
        gen_op_logic_cc(dst);
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        gen_op_sub_cc2(dst);
    }
}
/* SUBcc with a register second operand: dst = src1 - src2, flags set. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_sub_cc2(dst);
}
/* Final flag update for SUBXcc.  Does NOT clear the flags: callers already
   cleared them and recorded a partial borrow from the carry-in step. */
static inline void gen_op_subx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* SUBXcc with immediate: dst = src1 - carry-in - src2.  Borrow is computed
   in two steps (after subtracting carry-in, then src2) so a borrow from
   either subtraction is recorded. */
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_subx_cc2(dst);
}
/* SUBXcc with register operand: dst = src1 - carry-in - src2, with the
   same two-step borrow computation as gen_op_subxi_cc. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_subx_cc2(dst);
}
/* TSUBcc: tagged subtract; like SUBcc but icc.V is additionally set when
   either operand has nonzero tag bits. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* TSUBccTV: tagged subtract that traps (TT_TOVF) on tag mismatch or signed
   overflow instead of setting icc.V. */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* MULScc: one step of the SPARC V8 multiply-step instruction.  Shifts the
   Y register, conditionally adds src2 (zeroed when Y bit 0 is clear), and
   updates the icc flags from the addition. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* UMUL: 32x32 -> 64 unsigned multiply; high 32 bits go to the Y register,
   the full/low result to dst (full 64 bits on sparc64). */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
/* SMUL: 32x32 -> 64 signed multiply; high 32 bits go to the Y register,
   the full/low result to dst (full 64 bits on sparc64). */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
/* Emit a TT_DIV_ZERO trap if 'divisor' is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
/* SDIVX: 64-bit signed division.  Traps on a zero divisor; the overflowing
   case INT64_MIN / -1 is special-cased to yield INT64_MIN rather than
   executing a host division that could fault. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
/* Update flags after a division: N/Z from the result; V is set when the
   helper flagged overflow by leaving cpu_cc_src2 nonzero. */
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
// 1
/* Branch always: condition is constant true. */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
// Z
/* Branch on equal: dst = Z. */
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
// Z | (N ^ V)
/* Branch on less-or-equal (signed): dst = Z | (N ^ V). */
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
// N ^ V
/* Branch on less (signed): dst = N ^ V. */
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
// C | Z
/* Branch on less-or-equal unsigned: dst = C | Z. */
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
// C
/* Branch on carry set (less unsigned): dst = C. */
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
// V
/* Branch on overflow set: dst = V. */
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
// 0
/* Branch never: condition is constant false. */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
// N
/* Branch on negative: dst = N. */
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
// !Z
/* Branch on not-equal: dst = !Z. */
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// !(Z | (N ^ V))
/* Branch on greater (signed): dst = !(Z | (N ^ V)). */
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// !(N ^ V)
/* Branch on greater-or-equal (signed): dst = !(N ^ V). */
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// !(C | Z)
/* Branch on greater unsigned: dst = !(C | Z). */
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// !C
/* Branch on carry clear (greater-or-equal unsigned): dst = !C. */
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// !N
/* Branch on positive: dst = !N. */
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// !V
/* Branch on overflow clear: dst = !V. */
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
   FPSR bit field FCC1 | FCC0:
   0 =  (equal)
   1 <  (less)
   2 >  (greater)
   3 unordered
*/
/* Extract FCC0 of the fcc field at fcc_offset from FSR value src. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract FCC1 of the fcc field at fcc_offset from FSR value src. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
/* FP branch on not-equal: any FCC value other than 0. */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
// 1 or 2: FCC0 ^ FCC1
/* FP branch on less-or-greater: FCC is 1 or 2. */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
// 1 or 3: FCC0
/* FP branch on unordered-or-less: FCC is 1 or 3. */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
// 1: FCC0 & !FCC1
/* FP branch on less: FCC is exactly 1. */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
// 2 or 3: FCC1
/* FP branch on unordered-or-greater: FCC is 2 or 3. */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
// 2: !FCC0 & FCC1
/* FP branch on greater: FCC is exactly 2. */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
// 3: FCC0 & FCC1
/* FP branch on unordered: FCC is exactly 3. */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
// 0: !(FCC0 | FCC1)
/* FP branch on equal: FCC is exactly 0. */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// 0 or 3: !(FCC0 ^ FCC1)
/* FP branch on unordered-or-equal: FCC is 0 or 3. */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
// 0 or 2: !FCC0
/* FP branch on greater-or-equal: FCC is 0 or 2. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1155 // !1: !(FCC0 & !FCC1)
1156 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1157 unsigned int fcc_offset)
1159 gen_mov_reg_FCC0(dst, src, fcc_offset);
1160 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1161 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1162 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1163 tcg_gen_xori_tl(dst, dst, 0x1);
1166 // 0 or 1: !FCC1
1167 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1168 unsigned int fcc_offset)
1170 gen_mov_reg_FCC1(dst, src, fcc_offset);
1171 tcg_gen_xori_tl(dst, dst, 0x1);
1174 // !2: !(!FCC0 & FCC1)
1175 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1176 unsigned int fcc_offset)
1178 gen_mov_reg_FCC0(dst, src, fcc_offset);
1179 tcg_gen_xori_tl(dst, dst, 0x1);
1180 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1181 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1182 tcg_gen_xori_tl(dst, dst, 0x1);
1185 // !3: !(FCC0 & FCC1)
1186 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1187 unsigned int fcc_offset)
1189 gen_mov_reg_FCC0(dst, src, fcc_offset);
1190 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1191 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1192 tcg_gen_xori_tl(dst, dst, 0x1);
1195 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1196 target_ulong pc2, TCGv r_cond)
1198 int l1;
1200 l1 = gen_new_label();
1202 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1204 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1206 gen_set_label(l1);
1207 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1210 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1211 target_ulong pc2, TCGv r_cond)
1213 int l1;
1215 l1 = gen_new_label();
1217 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1219 gen_goto_tb(dc, 0, pc2, pc1);
1221 gen_set_label(l1);
1222 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1225 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1226 TCGv r_cond)
1228 int l1, l2;
1230 l1 = gen_new_label();
1231 l2 = gen_new_label();
1233 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1235 tcg_gen_movi_tl(cpu_npc, npc1);
1236 tcg_gen_br(l2);
1238 gen_set_label(l1);
1239 tcg_gen_movi_tl(cpu_npc, npc2);
1240 gen_set_label(l2);
1243 /* call this function before using the condition register as it may
1244 have been set for a jump */
1245 static inline void flush_cond(DisasContext *dc, TCGv cond)
1247 if (dc->npc == JUMP_PC) {
1248 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1249 dc->npc = DYNAMIC_PC;
1253 static inline void save_npc(DisasContext *dc, TCGv cond)
1255 if (dc->npc == JUMP_PC) {
1256 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1257 dc->npc = DYNAMIC_PC;
1258 } else if (dc->npc != DYNAMIC_PC) {
1259 tcg_gen_movi_tl(cpu_npc, dc->npc);
1263 static inline void save_state(DisasContext *dc, TCGv cond)
1265 tcg_gen_movi_tl(cpu_pc, dc->pc);
1266 save_npc(dc, cond);
1269 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1271 if (dc->npc == JUMP_PC) {
1272 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1273 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1274 dc->pc = DYNAMIC_PC;
1275 } else if (dc->npc == DYNAMIC_PC) {
1276 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1277 dc->pc = DYNAMIC_PC;
1278 } else {
1279 dc->pc = dc->npc;
1283 static inline void gen_op_next_insn(void)
1285 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1286 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1289 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1291 TCGv_i32 r_src;
1293 #ifdef TARGET_SPARC64
1294 if (cc)
1295 r_src = cpu_xcc;
1296 else
1297 r_src = cpu_psr;
1298 #else
1299 r_src = cpu_psr;
1300 #endif
1301 switch (cond) {
1302 case 0x0:
1303 gen_op_eval_bn(r_dst);
1304 break;
1305 case 0x1:
1306 gen_op_eval_be(r_dst, r_src);
1307 break;
1308 case 0x2:
1309 gen_op_eval_ble(r_dst, r_src);
1310 break;
1311 case 0x3:
1312 gen_op_eval_bl(r_dst, r_src);
1313 break;
1314 case 0x4:
1315 gen_op_eval_bleu(r_dst, r_src);
1316 break;
1317 case 0x5:
1318 gen_op_eval_bcs(r_dst, r_src);
1319 break;
1320 case 0x6:
1321 gen_op_eval_bneg(r_dst, r_src);
1322 break;
1323 case 0x7:
1324 gen_op_eval_bvs(r_dst, r_src);
1325 break;
1326 case 0x8:
1327 gen_op_eval_ba(r_dst);
1328 break;
1329 case 0x9:
1330 gen_op_eval_bne(r_dst, r_src);
1331 break;
1332 case 0xa:
1333 gen_op_eval_bg(r_dst, r_src);
1334 break;
1335 case 0xb:
1336 gen_op_eval_bge(r_dst, r_src);
1337 break;
1338 case 0xc:
1339 gen_op_eval_bgu(r_dst, r_src);
1340 break;
1341 case 0xd:
1342 gen_op_eval_bcc(r_dst, r_src);
1343 break;
1344 case 0xe:
1345 gen_op_eval_bpos(r_dst, r_src);
1346 break;
1347 case 0xf:
1348 gen_op_eval_bvc(r_dst, r_src);
1349 break;
1353 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1355 unsigned int offset;
1357 switch (cc) {
1358 default:
1359 case 0x0:
1360 offset = 0;
1361 break;
1362 case 0x1:
1363 offset = 32 - 10;
1364 break;
1365 case 0x2:
1366 offset = 34 - 10;
1367 break;
1368 case 0x3:
1369 offset = 36 - 10;
1370 break;
1373 switch (cond) {
1374 case 0x0:
1375 gen_op_eval_bn(r_dst);
1376 break;
1377 case 0x1:
1378 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1379 break;
1380 case 0x2:
1381 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1382 break;
1383 case 0x3:
1384 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1385 break;
1386 case 0x4:
1387 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1388 break;
1389 case 0x5:
1390 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1391 break;
1392 case 0x6:
1393 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1394 break;
1395 case 0x7:
1396 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1397 break;
1398 case 0x8:
1399 gen_op_eval_ba(r_dst);
1400 break;
1401 case 0x9:
1402 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1403 break;
1404 case 0xa:
1405 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1406 break;
1407 case 0xb:
1408 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1409 break;
1410 case 0xc:
1411 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1412 break;
1413 case 0xd:
1414 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1415 break;
1416 case 0xe:
1417 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1418 break;
1419 case 0xf:
1420 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1421 break;
1425 #ifdef TARGET_SPARC64
1426 // Inverted logic
1427 static const int gen_tcg_cond_reg[8] = {
1429 TCG_COND_NE,
1430 TCG_COND_GT,
1431 TCG_COND_GE,
1433 TCG_COND_EQ,
1434 TCG_COND_LE,
1435 TCG_COND_LT,
1438 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1440 int l1;
1442 l1 = gen_new_label();
1443 tcg_gen_movi_tl(r_dst, 0);
1444 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1445 tcg_gen_movi_tl(r_dst, 1);
1446 gen_set_label(l1);
1448 #endif
1450 /* XXX: potentially incorrect if dynamic npc */
1451 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1452 TCGv r_cond)
1454 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1455 target_ulong target = dc->pc + offset;
1457 if (cond == 0x0) {
1458 /* unconditional not taken */
1459 if (a) {
1460 dc->pc = dc->npc + 4;
1461 dc->npc = dc->pc + 4;
1462 } else {
1463 dc->pc = dc->npc;
1464 dc->npc = dc->pc + 4;
1466 } else if (cond == 0x8) {
1467 /* unconditional taken */
1468 if (a) {
1469 dc->pc = target;
1470 dc->npc = dc->pc + 4;
1471 } else {
1472 dc->pc = dc->npc;
1473 dc->npc = target;
1475 } else {
1476 flush_cond(dc, r_cond);
1477 gen_cond(r_cond, cc, cond);
1478 if (a) {
1479 gen_branch_a(dc, target, dc->npc, r_cond);
1480 dc->is_br = 1;
1481 } else {
1482 dc->pc = dc->npc;
1483 dc->jump_pc[0] = target;
1484 dc->jump_pc[1] = dc->npc + 4;
1485 dc->npc = JUMP_PC;
1490 /* XXX: potentially incorrect if dynamic npc */
1491 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1492 TCGv r_cond)
1494 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1495 target_ulong target = dc->pc + offset;
1497 if (cond == 0x0) {
1498 /* unconditional not taken */
1499 if (a) {
1500 dc->pc = dc->npc + 4;
1501 dc->npc = dc->pc + 4;
1502 } else {
1503 dc->pc = dc->npc;
1504 dc->npc = dc->pc + 4;
1506 } else if (cond == 0x8) {
1507 /* unconditional taken */
1508 if (a) {
1509 dc->pc = target;
1510 dc->npc = dc->pc + 4;
1511 } else {
1512 dc->pc = dc->npc;
1513 dc->npc = target;
1515 } else {
1516 flush_cond(dc, r_cond);
1517 gen_fcond(r_cond, cc, cond);
1518 if (a) {
1519 gen_branch_a(dc, target, dc->npc, r_cond);
1520 dc->is_br = 1;
1521 } else {
1522 dc->pc = dc->npc;
1523 dc->jump_pc[0] = target;
1524 dc->jump_pc[1] = dc->npc + 4;
1525 dc->npc = JUMP_PC;
1530 #ifdef TARGET_SPARC64
1531 /* XXX: potentially incorrect if dynamic npc */
1532 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1533 TCGv r_cond, TCGv r_reg)
1535 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1536 target_ulong target = dc->pc + offset;
1538 flush_cond(dc, r_cond);
1539 gen_cond_reg(r_cond, cond, r_reg);
1540 if (a) {
1541 gen_branch_a(dc, target, dc->npc, r_cond);
1542 dc->is_br = 1;
1543 } else {
1544 dc->pc = dc->npc;
1545 dc->jump_pc[0] = target;
1546 dc->jump_pc[1] = dc->npc + 4;
1547 dc->npc = JUMP_PC;
1551 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1553 switch (fccno) {
1554 case 0:
1555 gen_helper_fcmps(r_rs1, r_rs2);
1556 break;
1557 case 1:
1558 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1559 break;
1560 case 2:
1561 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1562 break;
1563 case 3:
1564 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1565 break;
1569 static inline void gen_op_fcmpd(int fccno)
1571 switch (fccno) {
1572 case 0:
1573 gen_helper_fcmpd();
1574 break;
1575 case 1:
1576 gen_helper_fcmpd_fcc1();
1577 break;
1578 case 2:
1579 gen_helper_fcmpd_fcc2();
1580 break;
1581 case 3:
1582 gen_helper_fcmpd_fcc3();
1583 break;
1587 static inline void gen_op_fcmpq(int fccno)
1589 switch (fccno) {
1590 case 0:
1591 gen_helper_fcmpq();
1592 break;
1593 case 1:
1594 gen_helper_fcmpq_fcc1();
1595 break;
1596 case 2:
1597 gen_helper_fcmpq_fcc2();
1598 break;
1599 case 3:
1600 gen_helper_fcmpq_fcc3();
1601 break;
1605 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1607 switch (fccno) {
1608 case 0:
1609 gen_helper_fcmpes(r_rs1, r_rs2);
1610 break;
1611 case 1:
1612 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1613 break;
1614 case 2:
1615 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1616 break;
1617 case 3:
1618 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1619 break;
1623 static inline void gen_op_fcmped(int fccno)
1625 switch (fccno) {
1626 case 0:
1627 gen_helper_fcmped();
1628 break;
1629 case 1:
1630 gen_helper_fcmped_fcc1();
1631 break;
1632 case 2:
1633 gen_helper_fcmped_fcc2();
1634 break;
1635 case 3:
1636 gen_helper_fcmped_fcc3();
1637 break;
1641 static inline void gen_op_fcmpeq(int fccno)
1643 switch (fccno) {
1644 case 0:
1645 gen_helper_fcmpeq();
1646 break;
1647 case 1:
1648 gen_helper_fcmpeq_fcc1();
1649 break;
1650 case 2:
1651 gen_helper_fcmpeq_fcc2();
1652 break;
1653 case 3:
1654 gen_helper_fcmpeq_fcc3();
1655 break;
1659 #else
1661 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1663 gen_helper_fcmps(r_rs1, r_rs2);
1666 static inline void gen_op_fcmpd(int fccno)
1668 gen_helper_fcmpd();
1671 static inline void gen_op_fcmpq(int fccno)
1673 gen_helper_fcmpq();
1676 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1678 gen_helper_fcmpes(r_rs1, r_rs2);
1681 static inline void gen_op_fcmped(int fccno)
1683 gen_helper_fcmped();
1686 static inline void gen_op_fcmpeq(int fccno)
1688 gen_helper_fcmpeq();
1690 #endif
1692 static inline void gen_op_fpexception_im(int fsr_flags)
1694 TCGv_i32 r_const;
1696 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1697 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1698 r_const = tcg_const_i32(TT_FP_EXCP);
1699 gen_helper_raise_exception(r_const);
1700 tcg_temp_free_i32(r_const);
1703 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1705 #if !defined(CONFIG_USER_ONLY)
1706 if (!dc->fpu_enabled) {
1707 TCGv_i32 r_const;
1709 save_state(dc, r_cond);
1710 r_const = tcg_const_i32(TT_NFPU_INSN);
1711 gen_helper_raise_exception(r_const);
1712 tcg_temp_free_i32(r_const);
1713 dc->is_br = 1;
1714 return 1;
1716 #endif
1717 return 0;
1720 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1722 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1725 static inline void gen_clear_float_exceptions(void)
1727 gen_helper_clear_float_exceptions();
1730 /* asi moves */
1731 #ifdef TARGET_SPARC64
1732 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1734 int asi;
1735 TCGv_i32 r_asi;
1737 if (IS_IMM) {
1738 r_asi = tcg_temp_new_i32();
1739 tcg_gen_mov_i32(r_asi, cpu_asi);
1740 } else {
1741 asi = GET_FIELD(insn, 19, 26);
1742 r_asi = tcg_const_i32(asi);
1744 return r_asi;
1747 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1748 int sign)
1750 TCGv_i32 r_asi, r_size, r_sign;
1752 r_asi = gen_get_asi(insn, addr);
1753 r_size = tcg_const_i32(size);
1754 r_sign = tcg_const_i32(sign);
1755 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1756 tcg_temp_free_i32(r_sign);
1757 tcg_temp_free_i32(r_size);
1758 tcg_temp_free_i32(r_asi);
1761 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1763 TCGv_i32 r_asi, r_size;
1765 r_asi = gen_get_asi(insn, addr);
1766 r_size = tcg_const_i32(size);
1767 gen_helper_st_asi(addr, src, r_asi, r_size);
1768 tcg_temp_free_i32(r_size);
1769 tcg_temp_free_i32(r_asi);
1772 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1774 TCGv_i32 r_asi, r_size, r_rd;
1776 r_asi = gen_get_asi(insn, addr);
1777 r_size = tcg_const_i32(size);
1778 r_rd = tcg_const_i32(rd);
1779 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1780 tcg_temp_free_i32(r_rd);
1781 tcg_temp_free_i32(r_size);
1782 tcg_temp_free_i32(r_asi);
1785 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1787 TCGv_i32 r_asi, r_size, r_rd;
1789 r_asi = gen_get_asi(insn, addr);
1790 r_size = tcg_const_i32(size);
1791 r_rd = tcg_const_i32(rd);
1792 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1793 tcg_temp_free_i32(r_rd);
1794 tcg_temp_free_i32(r_size);
1795 tcg_temp_free_i32(r_asi);
1798 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1800 TCGv_i32 r_asi, r_size, r_sign;
1802 r_asi = gen_get_asi(insn, addr);
1803 r_size = tcg_const_i32(4);
1804 r_sign = tcg_const_i32(0);
1805 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1806 tcg_temp_free_i32(r_sign);
1807 gen_helper_st_asi(addr, dst, r_asi, r_size);
1808 tcg_temp_free_i32(r_size);
1809 tcg_temp_free_i32(r_asi);
1810 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1813 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1815 TCGv_i32 r_asi, r_rd;
1817 r_asi = gen_get_asi(insn, addr);
1818 r_rd = tcg_const_i32(rd);
1819 gen_helper_ldda_asi(addr, r_asi, r_rd);
1820 tcg_temp_free_i32(r_rd);
1821 tcg_temp_free_i32(r_asi);
1824 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1826 TCGv_i32 r_asi, r_size;
1828 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1829 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1830 r_asi = gen_get_asi(insn, addr);
1831 r_size = tcg_const_i32(8);
1832 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1833 tcg_temp_free_i32(r_size);
1834 tcg_temp_free_i32(r_asi);
1837 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1838 int rd)
1840 TCGv r_val1;
1841 TCGv_i32 r_asi;
1843 r_val1 = tcg_temp_new();
1844 gen_movl_reg_TN(rd, r_val1);
1845 r_asi = gen_get_asi(insn, addr);
1846 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1847 tcg_temp_free_i32(r_asi);
1848 tcg_temp_free(r_val1);
1851 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1852 int rd)
1854 TCGv_i32 r_asi;
1856 gen_movl_reg_TN(rd, cpu_tmp64);
1857 r_asi = gen_get_asi(insn, addr);
1858 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1859 tcg_temp_free_i32(r_asi);
1862 #elif !defined(CONFIG_USER_ONLY)
1864 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1865 int sign)
1867 TCGv_i32 r_asi, r_size, r_sign;
1869 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1870 r_size = tcg_const_i32(size);
1871 r_sign = tcg_const_i32(sign);
1872 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1873 tcg_temp_free(r_sign);
1874 tcg_temp_free(r_size);
1875 tcg_temp_free(r_asi);
1876 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1879 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1881 TCGv_i32 r_asi, r_size;
1883 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1884 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1885 r_size = tcg_const_i32(size);
1886 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1887 tcg_temp_free(r_size);
1888 tcg_temp_free(r_asi);
1891 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1893 TCGv_i32 r_asi, r_size, r_sign;
1894 TCGv_i64 r_val;
1896 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1897 r_size = tcg_const_i32(4);
1898 r_sign = tcg_const_i32(0);
1899 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1900 tcg_temp_free(r_sign);
1901 r_val = tcg_temp_new_i64();
1902 tcg_gen_extu_tl_i64(r_val, dst);
1903 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1904 tcg_temp_free_i64(r_val);
1905 tcg_temp_free(r_size);
1906 tcg_temp_free(r_asi);
1907 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1910 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1912 TCGv_i32 r_asi, r_size, r_sign;
1914 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1915 r_size = tcg_const_i32(8);
1916 r_sign = tcg_const_i32(0);
1917 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1918 tcg_temp_free(r_sign);
1919 tcg_temp_free(r_size);
1920 tcg_temp_free(r_asi);
1921 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1922 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1923 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1924 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1925 gen_movl_TN_reg(rd, hi);
1928 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1930 TCGv_i32 r_asi, r_size;
1932 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1933 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1934 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1935 r_size = tcg_const_i32(8);
1936 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1937 tcg_temp_free(r_size);
1938 tcg_temp_free(r_asi);
1940 #endif
1942 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1943 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1945 TCGv_i64 r_val;
1946 TCGv_i32 r_asi, r_size;
1948 gen_ld_asi(dst, addr, insn, 1, 0);
1950 r_val = tcg_const_i64(0xffULL);
1951 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1952 r_size = tcg_const_i32(1);
1953 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1954 tcg_temp_free_i32(r_size);
1955 tcg_temp_free_i32(r_asi);
1956 tcg_temp_free_i64(r_val);
1958 #endif
1960 static inline TCGv get_src1(unsigned int insn, TCGv def)
1962 TCGv r_rs1 = def;
1963 unsigned int rs1;
1965 rs1 = GET_FIELD(insn, 13, 17);
1966 if (rs1 == 0)
1967 r_rs1 = tcg_const_tl(0); // XXX how to free?
1968 else if (rs1 < 8)
1969 r_rs1 = cpu_gregs[rs1];
1970 else
1971 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1972 return r_rs1;
1975 static inline TCGv get_src2(unsigned int insn, TCGv def)
1977 TCGv r_rs2 = def;
1979 if (IS_IMM) { /* immediate */
1980 target_long simm;
1982 simm = GET_FIELDs(insn, 19, 31);
1983 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1984 } else { /* register */
1985 unsigned int rs2;
1987 rs2 = GET_FIELD(insn, 27, 31);
1988 if (rs2 == 0)
1989 r_rs2 = tcg_const_tl(0); // XXX how to free?
1990 else if (rs2 < 8)
1991 r_rs2 = cpu_gregs[rs2];
1992 else
1993 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1995 return r_rs2;
/* Bail out to the decoder's illegal_insn / nfpu_insn labels when the
   modelled CPU lacks the required IU or FPU feature.  Only usable
   inside disas_sparc_insn, which defines those labels.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2005 /* before an instruction, dc->pc must be static */
2006 static void disas_sparc_insn(DisasContext * dc)
2008 unsigned int insn, opc, rs1, rs2, rd;
2009 target_long simm;
2011 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2012 tcg_gen_debug_insn_start(dc->pc);
2013 insn = ldl_code(dc->pc);
2014 opc = GET_FIELD(insn, 0, 1);
2016 rd = GET_FIELD(insn, 2, 6);
2018 cpu_src1 = tcg_temp_new(); // const
2019 cpu_src2 = tcg_temp_new(); // const
2021 switch (opc) {
2022 case 0: /* branches/sethi */
2024 unsigned int xop = GET_FIELD(insn, 7, 9);
2025 int32_t target;
2026 switch (xop) {
2027 #ifdef TARGET_SPARC64
2028 case 0x1: /* V9 BPcc */
2030 int cc;
2032 target = GET_FIELD_SP(insn, 0, 18);
2033 target = sign_extend(target, 18);
2034 target <<= 2;
2035 cc = GET_FIELD_SP(insn, 20, 21);
2036 if (cc == 0)
2037 do_branch(dc, target, insn, 0, cpu_cond);
2038 else if (cc == 2)
2039 do_branch(dc, target, insn, 1, cpu_cond);
2040 else
2041 goto illegal_insn;
2042 goto jmp_insn;
2044 case 0x3: /* V9 BPr */
2046 target = GET_FIELD_SP(insn, 0, 13) |
2047 (GET_FIELD_SP(insn, 20, 21) << 14);
2048 target = sign_extend(target, 16);
2049 target <<= 2;
2050 cpu_src1 = get_src1(insn, cpu_src1);
2051 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2052 goto jmp_insn;
2054 case 0x5: /* V9 FBPcc */
2056 int cc = GET_FIELD_SP(insn, 20, 21);
2057 if (gen_trap_ifnofpu(dc, cpu_cond))
2058 goto jmp_insn;
2059 target = GET_FIELD_SP(insn, 0, 18);
2060 target = sign_extend(target, 19);
2061 target <<= 2;
2062 do_fbranch(dc, target, insn, cc, cpu_cond);
2063 goto jmp_insn;
2065 #else
2066 case 0x7: /* CBN+x */
2068 goto ncp_insn;
2070 #endif
2071 case 0x2: /* BN+x */
2073 target = GET_FIELD(insn, 10, 31);
2074 target = sign_extend(target, 22);
2075 target <<= 2;
2076 do_branch(dc, target, insn, 0, cpu_cond);
2077 goto jmp_insn;
2079 case 0x6: /* FBN+x */
2081 if (gen_trap_ifnofpu(dc, cpu_cond))
2082 goto jmp_insn;
2083 target = GET_FIELD(insn, 10, 31);
2084 target = sign_extend(target, 22);
2085 target <<= 2;
2086 do_fbranch(dc, target, insn, 0, cpu_cond);
2087 goto jmp_insn;
2089 case 0x4: /* SETHI */
2090 if (rd) { // nop
2091 uint32_t value = GET_FIELD(insn, 10, 31);
2092 TCGv r_const;
2094 r_const = tcg_const_tl(value << 10);
2095 gen_movl_TN_reg(rd, r_const);
2096 tcg_temp_free(r_const);
2098 break;
2099 case 0x0: /* UNIMPL */
2100 default:
2101 goto illegal_insn;
2103 break;
2105 break;
2106 case 1: /*CALL*/
2108 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2109 TCGv r_const;
2111 r_const = tcg_const_tl(dc->pc);
2112 gen_movl_TN_reg(15, r_const);
2113 tcg_temp_free(r_const);
2114 target += dc->pc;
2115 gen_mov_pc_npc(dc, cpu_cond);
2116 dc->npc = target;
2118 goto jmp_insn;
2119 case 2: /* FPU & Logical Operations */
2121 unsigned int xop = GET_FIELD(insn, 7, 12);
2122 if (xop == 0x3a) { /* generate trap */
2123 int cond;
2125 cpu_src1 = get_src1(insn, cpu_src1);
2126 if (IS_IMM) {
2127 rs2 = GET_FIELD(insn, 25, 31);
2128 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2129 } else {
2130 rs2 = GET_FIELD(insn, 27, 31);
2131 if (rs2 != 0) {
2132 gen_movl_reg_TN(rs2, cpu_src2);
2133 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2134 } else
2135 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2137 cond = GET_FIELD(insn, 3, 6);
2138 if (cond == 0x8) {
2139 save_state(dc, cpu_cond);
2140 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2141 supervisor(dc))
2142 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2143 else
2144 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2145 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2146 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2147 gen_helper_raise_exception(cpu_tmp32);
2148 } else if (cond != 0) {
2149 TCGv r_cond = tcg_temp_new();
2150 int l1;
2151 #ifdef TARGET_SPARC64
2152 /* V9 icc/xcc */
2153 int cc = GET_FIELD_SP(insn, 11, 12);
2155 save_state(dc, cpu_cond);
2156 if (cc == 0)
2157 gen_cond(r_cond, 0, cond);
2158 else if (cc == 2)
2159 gen_cond(r_cond, 1, cond);
2160 else
2161 goto illegal_insn;
2162 #else
2163 save_state(dc, cpu_cond);
2164 gen_cond(r_cond, 0, cond);
2165 #endif
2166 l1 = gen_new_label();
2167 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2169 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2170 supervisor(dc))
2171 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2172 else
2173 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2174 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2175 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2176 gen_helper_raise_exception(cpu_tmp32);
2178 gen_set_label(l1);
2179 tcg_temp_free(r_cond);
2181 gen_op_next_insn();
2182 tcg_gen_exit_tb(0);
2183 dc->is_br = 1;
2184 goto jmp_insn;
2185 } else if (xop == 0x28) {
2186 rs1 = GET_FIELD(insn, 13, 17);
2187 switch(rs1) {
2188 case 0: /* rdy */
2189 #ifndef TARGET_SPARC64
2190 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2191 manual, rdy on the microSPARC
2192 II */
2193 case 0x0f: /* stbar in the SPARCv8 manual,
2194 rdy on the microSPARC II */
2195 case 0x10 ... 0x1f: /* implementation-dependent in the
2196 SPARCv8 manual, rdy on the
2197 microSPARC II */
2198 #endif
2199 gen_movl_TN_reg(rd, cpu_y);
2200 break;
2201 #ifdef TARGET_SPARC64
2202 case 0x2: /* V9 rdccr */
2203 gen_helper_rdccr(cpu_dst);
2204 gen_movl_TN_reg(rd, cpu_dst);
2205 break;
2206 case 0x3: /* V9 rdasi */
2207 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2208 gen_movl_TN_reg(rd, cpu_dst);
2209 break;
2210 case 0x4: /* V9 rdtick */
2212 TCGv_ptr r_tickptr;
2214 r_tickptr = tcg_temp_new_ptr();
2215 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2216 offsetof(CPUState, tick));
2217 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2218 tcg_temp_free_ptr(r_tickptr);
2219 gen_movl_TN_reg(rd, cpu_dst);
2221 break;
2222 case 0x5: /* V9 rdpc */
2224 TCGv r_const;
2226 r_const = tcg_const_tl(dc->pc);
2227 gen_movl_TN_reg(rd, r_const);
2228 tcg_temp_free(r_const);
2230 break;
2231 case 0x6: /* V9 rdfprs */
2232 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2233 gen_movl_TN_reg(rd, cpu_dst);
2234 break;
2235 case 0xf: /* V9 membar */
2236 break; /* no effect */
2237 case 0x13: /* Graphics Status */
2238 if (gen_trap_ifnofpu(dc, cpu_cond))
2239 goto jmp_insn;
2240 gen_movl_TN_reg(rd, cpu_gsr);
2241 break;
2242 case 0x16: /* Softint */
2243 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2244 gen_movl_TN_reg(rd, cpu_dst);
2245 break;
2246 case 0x17: /* Tick compare */
2247 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2248 break;
2249 case 0x18: /* System tick */
2251 TCGv_ptr r_tickptr;
2253 r_tickptr = tcg_temp_new_ptr();
2254 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2255 offsetof(CPUState, stick));
2256 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2257 tcg_temp_free_ptr(r_tickptr);
2258 gen_movl_TN_reg(rd, cpu_dst);
2260 break;
2261 case 0x19: /* System tick compare */
2262 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2263 break;
2264 case 0x10: /* Performance Control */
2265 case 0x11: /* Performance Instrumentation Counter */
2266 case 0x12: /* Dispatch Control */
2267 case 0x14: /* Softint set, WO */
2268 case 0x15: /* Softint clear, WO */
2269 #endif
2270 default:
2271 goto illegal_insn;
2273 #if !defined(CONFIG_USER_ONLY)
2274 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2275 #ifndef TARGET_SPARC64
2276 if (!supervisor(dc))
2277 goto priv_insn;
2278 gen_helper_rdpsr(cpu_dst);
2279 #else
2280 CHECK_IU_FEATURE(dc, HYPV);
2281 if (!hypervisor(dc))
2282 goto priv_insn;
2283 rs1 = GET_FIELD(insn, 13, 17);
2284 switch (rs1) {
2285 case 0: // hpstate
2286 // gen_op_rdhpstate();
2287 break;
2288 case 1: // htstate
2289 // gen_op_rdhtstate();
2290 break;
2291 case 3: // hintp
2292 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2293 break;
2294 case 5: // htba
2295 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2296 break;
2297 case 6: // hver
2298 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2299 break;
2300 case 31: // hstick_cmpr
2301 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2302 break;
2303 default:
2304 goto illegal_insn;
2306 #endif
2307 gen_movl_TN_reg(rd, cpu_dst);
2308 break;
2309 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2310 if (!supervisor(dc))
2311 goto priv_insn;
2312 #ifdef TARGET_SPARC64
2313 rs1 = GET_FIELD(insn, 13, 17);
2314 switch (rs1) {
2315 case 0: // tpc
2317 TCGv_ptr r_tsptr;
2319 r_tsptr = tcg_temp_new_ptr();
2320 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2321 offsetof(CPUState, tsptr));
2322 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2323 offsetof(trap_state, tpc));
2324 tcg_temp_free_ptr(r_tsptr);
2326 break;
2327 case 1: // tnpc
2329 TCGv_ptr r_tsptr;
2331 r_tsptr = tcg_temp_new_ptr();
2332 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2333 offsetof(CPUState, tsptr));
2334 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2335 offsetof(trap_state, tnpc));
2336 tcg_temp_free_ptr(r_tsptr);
2338 break;
2339 case 2: // tstate
2341 TCGv_ptr r_tsptr;
2343 r_tsptr = tcg_temp_new_ptr();
2344 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2345 offsetof(CPUState, tsptr));
2346 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2347 offsetof(trap_state, tstate));
2348 tcg_temp_free_ptr(r_tsptr);
2350 break;
2351 case 3: // tt
2353 TCGv_ptr r_tsptr;
2355 r_tsptr = tcg_temp_new_ptr();
2356 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2357 offsetof(CPUState, tsptr));
2358 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2359 offsetof(trap_state, tt));
2360 tcg_temp_free_ptr(r_tsptr);
2361 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2363 break;
2364 case 4: // tick
2366 TCGv_ptr r_tickptr;
2368 r_tickptr = tcg_temp_new_ptr();
2369 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2370 offsetof(CPUState, tick));
2371 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2372 gen_movl_TN_reg(rd, cpu_tmp0);
2373 tcg_temp_free_ptr(r_tickptr);
2375 break;
2376 case 5: // tba
2377 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2378 break;
2379 case 6: // pstate
2380 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2381 offsetof(CPUSPARCState, pstate));
2382 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2383 break;
2384 case 7: // tl
2385 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2386 offsetof(CPUSPARCState, tl));
2387 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2388 break;
2389 case 8: // pil
2390 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2391 offsetof(CPUSPARCState, psrpil));
2392 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2393 break;
2394 case 9: // cwp
2395 gen_helper_rdcwp(cpu_tmp0);
2396 break;
2397 case 10: // cansave
2398 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2399 offsetof(CPUSPARCState, cansave));
2400 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2401 break;
2402 case 11: // canrestore
2403 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2404 offsetof(CPUSPARCState, canrestore));
2405 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2406 break;
2407 case 12: // cleanwin
2408 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2409 offsetof(CPUSPARCState, cleanwin));
2410 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2411 break;
2412 case 13: // otherwin
2413 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2414 offsetof(CPUSPARCState, otherwin));
2415 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2416 break;
2417 case 14: // wstate
2418 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2419 offsetof(CPUSPARCState, wstate));
2420 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2421 break;
2422 case 16: // UA2005 gl
2423 CHECK_IU_FEATURE(dc, GL);
2424 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2425 offsetof(CPUSPARCState, gl));
2426 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2427 break;
2428 case 26: // UA2005 strand status
2429 CHECK_IU_FEATURE(dc, HYPV);
2430 if (!hypervisor(dc))
2431 goto priv_insn;
2432 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2433 break;
2434 case 31: // ver
2435 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2436 break;
2437 case 15: // fq
2438 default:
2439 goto illegal_insn;
2441 #else
2442 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2443 #endif
2444 gen_movl_TN_reg(rd, cpu_tmp0);
2445 break;
2446 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2447 #ifdef TARGET_SPARC64
2448 save_state(dc, cpu_cond);
2449 gen_helper_flushw();
2450 #else
2451 if (!supervisor(dc))
2452 goto priv_insn;
2453 gen_movl_TN_reg(rd, cpu_tbr);
2454 #endif
2455 break;
2456 #endif
2457 } else if (xop == 0x34) { /* FPU Operations */
2458 if (gen_trap_ifnofpu(dc, cpu_cond))
2459 goto jmp_insn;
2460 gen_op_clear_ieee_excp_and_FTT();
2461 rs1 = GET_FIELD(insn, 13, 17);
2462 rs2 = GET_FIELD(insn, 27, 31);
2463 xop = GET_FIELD(insn, 18, 26);
2464 switch (xop) {
2465 case 0x1: /* fmovs */
2466 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2467 break;
2468 case 0x5: /* fnegs */
2469 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2470 break;
2471 case 0x9: /* fabss */
2472 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2473 break;
2474 case 0x29: /* fsqrts */
2475 CHECK_FPU_FEATURE(dc, FSQRT);
2476 gen_clear_float_exceptions();
2477 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2478 gen_helper_check_ieee_exceptions();
2479 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2480 break;
2481 case 0x2a: /* fsqrtd */
2482 CHECK_FPU_FEATURE(dc, FSQRT);
2483 gen_op_load_fpr_DT1(DFPREG(rs2));
2484 gen_clear_float_exceptions();
2485 gen_helper_fsqrtd();
2486 gen_helper_check_ieee_exceptions();
2487 gen_op_store_DT0_fpr(DFPREG(rd));
2488 break;
2489 case 0x2b: /* fsqrtq */
2490 CHECK_FPU_FEATURE(dc, FLOAT128);
2491 gen_op_load_fpr_QT1(QFPREG(rs2));
2492 gen_clear_float_exceptions();
2493 gen_helper_fsqrtq();
2494 gen_helper_check_ieee_exceptions();
2495 gen_op_store_QT0_fpr(QFPREG(rd));
2496 break;
2497 case 0x41: /* fadds */
2498 gen_clear_float_exceptions();
2499 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2500 gen_helper_check_ieee_exceptions();
2501 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2502 break;
2503 case 0x42: /* faddd */
2504 gen_op_load_fpr_DT0(DFPREG(rs1));
2505 gen_op_load_fpr_DT1(DFPREG(rs2));
2506 gen_clear_float_exceptions();
2507 gen_helper_faddd();
2508 gen_helper_check_ieee_exceptions();
2509 gen_op_store_DT0_fpr(DFPREG(rd));
2510 break;
2511 case 0x43: /* faddq */
2512 CHECK_FPU_FEATURE(dc, FLOAT128);
2513 gen_op_load_fpr_QT0(QFPREG(rs1));
2514 gen_op_load_fpr_QT1(QFPREG(rs2));
2515 gen_clear_float_exceptions();
2516 gen_helper_faddq();
2517 gen_helper_check_ieee_exceptions();
2518 gen_op_store_QT0_fpr(QFPREG(rd));
2519 break;
2520 case 0x45: /* fsubs */
2521 gen_clear_float_exceptions();
2522 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2523 gen_helper_check_ieee_exceptions();
2524 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2525 break;
2526 case 0x46: /* fsubd */
2527 gen_op_load_fpr_DT0(DFPREG(rs1));
2528 gen_op_load_fpr_DT1(DFPREG(rs2));
2529 gen_clear_float_exceptions();
2530 gen_helper_fsubd();
2531 gen_helper_check_ieee_exceptions();
2532 gen_op_store_DT0_fpr(DFPREG(rd));
2533 break;
2534 case 0x47: /* fsubq */
2535 CHECK_FPU_FEATURE(dc, FLOAT128);
2536 gen_op_load_fpr_QT0(QFPREG(rs1));
2537 gen_op_load_fpr_QT1(QFPREG(rs2));
2538 gen_clear_float_exceptions();
2539 gen_helper_fsubq();
2540 gen_helper_check_ieee_exceptions();
2541 gen_op_store_QT0_fpr(QFPREG(rd));
2542 break;
2543 case 0x49: /* fmuls */
2544 CHECK_FPU_FEATURE(dc, FMUL);
2545 gen_clear_float_exceptions();
2546 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2547 gen_helper_check_ieee_exceptions();
2548 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2549 break;
2550 case 0x4a: /* fmuld */
2551 CHECK_FPU_FEATURE(dc, FMUL);
2552 gen_op_load_fpr_DT0(DFPREG(rs1));
2553 gen_op_load_fpr_DT1(DFPREG(rs2));
2554 gen_clear_float_exceptions();
2555 gen_helper_fmuld();
2556 gen_helper_check_ieee_exceptions();
2557 gen_op_store_DT0_fpr(DFPREG(rd));
2558 break;
2559 case 0x4b: /* fmulq */
2560 CHECK_FPU_FEATURE(dc, FLOAT128);
2561 CHECK_FPU_FEATURE(dc, FMUL);
2562 gen_op_load_fpr_QT0(QFPREG(rs1));
2563 gen_op_load_fpr_QT1(QFPREG(rs2));
2564 gen_clear_float_exceptions();
2565 gen_helper_fmulq();
2566 gen_helper_check_ieee_exceptions();
2567 gen_op_store_QT0_fpr(QFPREG(rd));
2568 break;
2569 case 0x4d: /* fdivs */
2570 gen_clear_float_exceptions();
2571 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2572 gen_helper_check_ieee_exceptions();
2573 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2574 break;
2575 case 0x4e: /* fdivd */
2576 gen_op_load_fpr_DT0(DFPREG(rs1));
2577 gen_op_load_fpr_DT1(DFPREG(rs2));
2578 gen_clear_float_exceptions();
2579 gen_helper_fdivd();
2580 gen_helper_check_ieee_exceptions();
2581 gen_op_store_DT0_fpr(DFPREG(rd));
2582 break;
2583 case 0x4f: /* fdivq */
2584 CHECK_FPU_FEATURE(dc, FLOAT128);
2585 gen_op_load_fpr_QT0(QFPREG(rs1));
2586 gen_op_load_fpr_QT1(QFPREG(rs2));
2587 gen_clear_float_exceptions();
2588 gen_helper_fdivq();
2589 gen_helper_check_ieee_exceptions();
2590 gen_op_store_QT0_fpr(QFPREG(rd));
2591 break;
2592 case 0x69: /* fsmuld */
2593 CHECK_FPU_FEATURE(dc, FSMULD);
2594 gen_clear_float_exceptions();
2595 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2596 gen_helper_check_ieee_exceptions();
2597 gen_op_store_DT0_fpr(DFPREG(rd));
2598 break;
2599 case 0x6e: /* fdmulq */
2600 CHECK_FPU_FEATURE(dc, FLOAT128);
2601 gen_op_load_fpr_DT0(DFPREG(rs1));
2602 gen_op_load_fpr_DT1(DFPREG(rs2));
2603 gen_clear_float_exceptions();
2604 gen_helper_fdmulq();
2605 gen_helper_check_ieee_exceptions();
2606 gen_op_store_QT0_fpr(QFPREG(rd));
2607 break;
2608 case 0xc4: /* fitos */
2609 gen_clear_float_exceptions();
2610 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2611 gen_helper_check_ieee_exceptions();
2612 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2613 break;
2614 case 0xc6: /* fdtos */
2615 gen_op_load_fpr_DT1(DFPREG(rs2));
2616 gen_clear_float_exceptions();
2617 gen_helper_fdtos(cpu_tmp32);
2618 gen_helper_check_ieee_exceptions();
2619 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2620 break;
2621 case 0xc7: /* fqtos */
2622 CHECK_FPU_FEATURE(dc, FLOAT128);
2623 gen_op_load_fpr_QT1(QFPREG(rs2));
2624 gen_clear_float_exceptions();
2625 gen_helper_fqtos(cpu_tmp32);
2626 gen_helper_check_ieee_exceptions();
2627 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2628 break;
2629 case 0xc8: /* fitod */
2630 gen_helper_fitod(cpu_fpr[rs2]);
2631 gen_op_store_DT0_fpr(DFPREG(rd));
2632 break;
2633 case 0xc9: /* fstod */
2634 gen_helper_fstod(cpu_fpr[rs2]);
2635 gen_op_store_DT0_fpr(DFPREG(rd));
2636 break;
2637 case 0xcb: /* fqtod */
2638 CHECK_FPU_FEATURE(dc, FLOAT128);
2639 gen_op_load_fpr_QT1(QFPREG(rs2));
2640 gen_clear_float_exceptions();
2641 gen_helper_fqtod();
2642 gen_helper_check_ieee_exceptions();
2643 gen_op_store_DT0_fpr(DFPREG(rd));
2644 break;
2645 case 0xcc: /* fitoq */
2646 CHECK_FPU_FEATURE(dc, FLOAT128);
2647 gen_helper_fitoq(cpu_fpr[rs2]);
2648 gen_op_store_QT0_fpr(QFPREG(rd));
2649 break;
2650 case 0xcd: /* fstoq */
2651 CHECK_FPU_FEATURE(dc, FLOAT128);
2652 gen_helper_fstoq(cpu_fpr[rs2]);
2653 gen_op_store_QT0_fpr(QFPREG(rd));
2654 break;
2655 case 0xce: /* fdtoq */
2656 CHECK_FPU_FEATURE(dc, FLOAT128);
2657 gen_op_load_fpr_DT1(DFPREG(rs2));
2658 gen_helper_fdtoq();
2659 gen_op_store_QT0_fpr(QFPREG(rd));
2660 break;
2661 case 0xd1: /* fstoi */
2662 gen_clear_float_exceptions();
2663 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2664 gen_helper_check_ieee_exceptions();
2665 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2666 break;
2667 case 0xd2: /* fdtoi */
2668 gen_op_load_fpr_DT1(DFPREG(rs2));
2669 gen_clear_float_exceptions();
2670 gen_helper_fdtoi(cpu_tmp32);
2671 gen_helper_check_ieee_exceptions();
2672 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2673 break;
2674 case 0xd3: /* fqtoi */
2675 CHECK_FPU_FEATURE(dc, FLOAT128);
2676 gen_op_load_fpr_QT1(QFPREG(rs2));
2677 gen_clear_float_exceptions();
2678 gen_helper_fqtoi(cpu_tmp32);
2679 gen_helper_check_ieee_exceptions();
2680 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2681 break;
2682 #ifdef TARGET_SPARC64
2683 case 0x2: /* V9 fmovd */
2684 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2685 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2686 cpu_fpr[DFPREG(rs2) + 1]);
2687 break;
2688 case 0x3: /* V9 fmovq */
2689 CHECK_FPU_FEATURE(dc, FLOAT128);
2690 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2691 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2692 cpu_fpr[QFPREG(rs2) + 1]);
2693 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2694 cpu_fpr[QFPREG(rs2) + 2]);
2695 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2696 cpu_fpr[QFPREG(rs2) + 3]);
2697 break;
2698 case 0x6: /* V9 fnegd */
2699 gen_op_load_fpr_DT1(DFPREG(rs2));
2700 gen_helper_fnegd();
2701 gen_op_store_DT0_fpr(DFPREG(rd));
2702 break;
2703 case 0x7: /* V9 fnegq */
2704 CHECK_FPU_FEATURE(dc, FLOAT128);
2705 gen_op_load_fpr_QT1(QFPREG(rs2));
2706 gen_helper_fnegq();
2707 gen_op_store_QT0_fpr(QFPREG(rd));
2708 break;
2709 case 0xa: /* V9 fabsd */
2710 gen_op_load_fpr_DT1(DFPREG(rs2));
2711 gen_helper_fabsd();
2712 gen_op_store_DT0_fpr(DFPREG(rd));
2713 break;
2714 case 0xb: /* V9 fabsq */
2715 CHECK_FPU_FEATURE(dc, FLOAT128);
2716 gen_op_load_fpr_QT1(QFPREG(rs2));
2717 gen_helper_fabsq();
2718 gen_op_store_QT0_fpr(QFPREG(rd));
2719 break;
2720 case 0x81: /* V9 fstox */
2721 gen_clear_float_exceptions();
2722 gen_helper_fstox(cpu_fpr[rs2]);
2723 gen_helper_check_ieee_exceptions();
2724 gen_op_store_DT0_fpr(DFPREG(rd));
2725 break;
2726 case 0x82: /* V9 fdtox */
2727 gen_op_load_fpr_DT1(DFPREG(rs2));
2728 gen_clear_float_exceptions();
2729 gen_helper_fdtox();
2730 gen_helper_check_ieee_exceptions();
2731 gen_op_store_DT0_fpr(DFPREG(rd));
2732 break;
2733 case 0x83: /* V9 fqtox */
2734 CHECK_FPU_FEATURE(dc, FLOAT128);
2735 gen_op_load_fpr_QT1(QFPREG(rs2));
2736 gen_clear_float_exceptions();
2737 gen_helper_fqtox();
2738 gen_helper_check_ieee_exceptions();
2739 gen_op_store_DT0_fpr(DFPREG(rd));
2740 break;
2741 case 0x84: /* V9 fxtos */
2742 gen_op_load_fpr_DT1(DFPREG(rs2));
2743 gen_clear_float_exceptions();
2744 gen_helper_fxtos(cpu_tmp32);
2745 gen_helper_check_ieee_exceptions();
2746 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2747 break;
2748 case 0x88: /* V9 fxtod */
2749 gen_op_load_fpr_DT1(DFPREG(rs2));
2750 gen_clear_float_exceptions();
2751 gen_helper_fxtod();
2752 gen_helper_check_ieee_exceptions();
2753 gen_op_store_DT0_fpr(DFPREG(rd));
2754 break;
2755 case 0x8c: /* V9 fxtoq */
2756 CHECK_FPU_FEATURE(dc, FLOAT128);
2757 gen_op_load_fpr_DT1(DFPREG(rs2));
2758 gen_clear_float_exceptions();
2759 gen_helper_fxtoq();
2760 gen_helper_check_ieee_exceptions();
2761 gen_op_store_QT0_fpr(QFPREG(rd));
2762 break;
2763 #endif
2764 default:
2765 goto illegal_insn;
2767 } else if (xop == 0x35) { /* FPU Operations */
2768 #ifdef TARGET_SPARC64
2769 int cond;
2770 #endif
2771 if (gen_trap_ifnofpu(dc, cpu_cond))
2772 goto jmp_insn;
2773 gen_op_clear_ieee_excp_and_FTT();
2774 rs1 = GET_FIELD(insn, 13, 17);
2775 rs2 = GET_FIELD(insn, 27, 31);
2776 xop = GET_FIELD(insn, 18, 26);
2777 #ifdef TARGET_SPARC64
2778 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2779 int l1;
2781 l1 = gen_new_label();
2782 cond = GET_FIELD_SP(insn, 14, 17);
2783 cpu_src1 = get_src1(insn, cpu_src1);
2784 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2785 0, l1);
2786 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2787 gen_set_label(l1);
2788 break;
2789 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2790 int l1;
2792 l1 = gen_new_label();
2793 cond = GET_FIELD_SP(insn, 14, 17);
2794 cpu_src1 = get_src1(insn, cpu_src1);
2795 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2796 0, l1);
2797 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2798 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2799 gen_set_label(l1);
2800 break;
2801 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2802 int l1;
2804 CHECK_FPU_FEATURE(dc, FLOAT128);
2805 l1 = gen_new_label();
2806 cond = GET_FIELD_SP(insn, 14, 17);
2807 cpu_src1 = get_src1(insn, cpu_src1);
2808 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2809 0, l1);
2810 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2811 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2812 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2813 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2814 gen_set_label(l1);
2815 break;
2817 #endif
2818 switch (xop) {
2819 #ifdef TARGET_SPARC64
2820 #define FMOVSCC(fcc) \
2822 TCGv r_cond; \
2823 int l1; \
2825 l1 = gen_new_label(); \
2826 r_cond = tcg_temp_new(); \
2827 cond = GET_FIELD_SP(insn, 14, 17); \
2828 gen_fcond(r_cond, fcc, cond); \
2829 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2830 0, l1); \
2831 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2832 gen_set_label(l1); \
2833 tcg_temp_free(r_cond); \
2835 #define FMOVDCC(fcc) \
2837 TCGv r_cond; \
2838 int l1; \
2840 l1 = gen_new_label(); \
2841 r_cond = tcg_temp_new(); \
2842 cond = GET_FIELD_SP(insn, 14, 17); \
2843 gen_fcond(r_cond, fcc, cond); \
2844 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2845 0, l1); \
2846 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2847 cpu_fpr[DFPREG(rs2)]); \
2848 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2849 cpu_fpr[DFPREG(rs2) + 1]); \
2850 gen_set_label(l1); \
2851 tcg_temp_free(r_cond); \
2853 #define FMOVQCC(fcc) \
2855 TCGv r_cond; \
2856 int l1; \
2858 l1 = gen_new_label(); \
2859 r_cond = tcg_temp_new(); \
2860 cond = GET_FIELD_SP(insn, 14, 17); \
2861 gen_fcond(r_cond, fcc, cond); \
2862 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2863 0, l1); \
2864 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2865 cpu_fpr[QFPREG(rs2)]); \
2866 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2867 cpu_fpr[QFPREG(rs2) + 1]); \
2868 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2869 cpu_fpr[QFPREG(rs2) + 2]); \
2870 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2871 cpu_fpr[QFPREG(rs2) + 3]); \
2872 gen_set_label(l1); \
2873 tcg_temp_free(r_cond); \
2875 case 0x001: /* V9 fmovscc %fcc0 */
2876 FMOVSCC(0);
2877 break;
2878 case 0x002: /* V9 fmovdcc %fcc0 */
2879 FMOVDCC(0);
2880 break;
2881 case 0x003: /* V9 fmovqcc %fcc0 */
2882 CHECK_FPU_FEATURE(dc, FLOAT128);
2883 FMOVQCC(0);
2884 break;
2885 case 0x041: /* V9 fmovscc %fcc1 */
2886 FMOVSCC(1);
2887 break;
2888 case 0x042: /* V9 fmovdcc %fcc1 */
2889 FMOVDCC(1);
2890 break;
2891 case 0x043: /* V9 fmovqcc %fcc1 */
2892 CHECK_FPU_FEATURE(dc, FLOAT128);
2893 FMOVQCC(1);
2894 break;
2895 case 0x081: /* V9 fmovscc %fcc2 */
2896 FMOVSCC(2);
2897 break;
2898 case 0x082: /* V9 fmovdcc %fcc2 */
2899 FMOVDCC(2);
2900 break;
2901 case 0x083: /* V9 fmovqcc %fcc2 */
2902 CHECK_FPU_FEATURE(dc, FLOAT128);
2903 FMOVQCC(2);
2904 break;
2905 case 0x0c1: /* V9 fmovscc %fcc3 */
2906 FMOVSCC(3);
2907 break;
2908 case 0x0c2: /* V9 fmovdcc %fcc3 */
2909 FMOVDCC(3);
2910 break;
2911 case 0x0c3: /* V9 fmovqcc %fcc3 */
2912 CHECK_FPU_FEATURE(dc, FLOAT128);
2913 FMOVQCC(3);
2914 break;
2915 #undef FMOVSCC
2916 #undef FMOVDCC
2917 #undef FMOVQCC
2918 #define FMOVSCC(icc) \
2920 TCGv r_cond; \
2921 int l1; \
2923 l1 = gen_new_label(); \
2924 r_cond = tcg_temp_new(); \
2925 cond = GET_FIELD_SP(insn, 14, 17); \
2926 gen_cond(r_cond, icc, cond); \
2927 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2928 0, l1); \
2929 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2930 gen_set_label(l1); \
2931 tcg_temp_free(r_cond); \
2933 #define FMOVDCC(icc) \
2935 TCGv r_cond; \
2936 int l1; \
2938 l1 = gen_new_label(); \
2939 r_cond = tcg_temp_new(); \
2940 cond = GET_FIELD_SP(insn, 14, 17); \
2941 gen_cond(r_cond, icc, cond); \
2942 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2943 0, l1); \
2944 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2945 cpu_fpr[DFPREG(rs2)]); \
2946 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2947 cpu_fpr[DFPREG(rs2) + 1]); \
2948 gen_set_label(l1); \
2949 tcg_temp_free(r_cond); \
2951 #define FMOVQCC(icc) \
2953 TCGv r_cond; \
2954 int l1; \
2956 l1 = gen_new_label(); \
2957 r_cond = tcg_temp_new(); \
2958 cond = GET_FIELD_SP(insn, 14, 17); \
2959 gen_cond(r_cond, icc, cond); \
2960 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2961 0, l1); \
2962 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2963 cpu_fpr[QFPREG(rs2)]); \
2964 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2965 cpu_fpr[QFPREG(rs2) + 1]); \
2966 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2967 cpu_fpr[QFPREG(rs2) + 2]); \
2968 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2969 cpu_fpr[QFPREG(rs2) + 3]); \
2970 gen_set_label(l1); \
2971 tcg_temp_free(r_cond); \
2974 case 0x101: /* V9 fmovscc %icc */
2975 FMOVSCC(0);
2976 break;
2977 case 0x102: /* V9 fmovdcc %icc */
2978 FMOVDCC(0);
2979 case 0x103: /* V9 fmovqcc %icc */
2980 CHECK_FPU_FEATURE(dc, FLOAT128);
2981 FMOVQCC(0);
2982 break;
2983 case 0x181: /* V9 fmovscc %xcc */
2984 FMOVSCC(1);
2985 break;
2986 case 0x182: /* V9 fmovdcc %xcc */
2987 FMOVDCC(1);
2988 break;
2989 case 0x183: /* V9 fmovqcc %xcc */
2990 CHECK_FPU_FEATURE(dc, FLOAT128);
2991 FMOVQCC(1);
2992 break;
2993 #undef FMOVSCC
2994 #undef FMOVDCC
2995 #undef FMOVQCC
2996 #endif
2997 case 0x51: /* fcmps, V9 %fcc */
2998 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2999 break;
3000 case 0x52: /* fcmpd, V9 %fcc */
3001 gen_op_load_fpr_DT0(DFPREG(rs1));
3002 gen_op_load_fpr_DT1(DFPREG(rs2));
3003 gen_op_fcmpd(rd & 3);
3004 break;
3005 case 0x53: /* fcmpq, V9 %fcc */
3006 CHECK_FPU_FEATURE(dc, FLOAT128);
3007 gen_op_load_fpr_QT0(QFPREG(rs1));
3008 gen_op_load_fpr_QT1(QFPREG(rs2));
3009 gen_op_fcmpq(rd & 3);
3010 break;
3011 case 0x55: /* fcmpes, V9 %fcc */
3012 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3013 break;
3014 case 0x56: /* fcmped, V9 %fcc */
3015 gen_op_load_fpr_DT0(DFPREG(rs1));
3016 gen_op_load_fpr_DT1(DFPREG(rs2));
3017 gen_op_fcmped(rd & 3);
3018 break;
3019 case 0x57: /* fcmpeq, V9 %fcc */
3020 CHECK_FPU_FEATURE(dc, FLOAT128);
3021 gen_op_load_fpr_QT0(QFPREG(rs1));
3022 gen_op_load_fpr_QT1(QFPREG(rs2));
3023 gen_op_fcmpeq(rd & 3);
3024 break;
3025 default:
3026 goto illegal_insn;
3028 } else if (xop == 0x2) {
3029 // clr/mov shortcut
3031 rs1 = GET_FIELD(insn, 13, 17);
3032 if (rs1 == 0) {
3033 // or %g0, x, y -> mov T0, x; mov y, T0
3034 if (IS_IMM) { /* immediate */
3035 TCGv r_const;
3037 simm = GET_FIELDs(insn, 19, 31);
3038 r_const = tcg_const_tl(simm);
3039 gen_movl_TN_reg(rd, r_const);
3040 tcg_temp_free(r_const);
3041 } else { /* register */
3042 rs2 = GET_FIELD(insn, 27, 31);
3043 gen_movl_reg_TN(rs2, cpu_dst);
3044 gen_movl_TN_reg(rd, cpu_dst);
3046 } else {
3047 cpu_src1 = get_src1(insn, cpu_src1);
3048 if (IS_IMM) { /* immediate */
3049 simm = GET_FIELDs(insn, 19, 31);
3050 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3051 gen_movl_TN_reg(rd, cpu_dst);
3052 } else { /* register */
3053 // or x, %g0, y -> mov T1, x; mov y, T1
3054 rs2 = GET_FIELD(insn, 27, 31);
3055 if (rs2 != 0) {
3056 gen_movl_reg_TN(rs2, cpu_src2);
3057 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3058 gen_movl_TN_reg(rd, cpu_dst);
3059 } else
3060 gen_movl_TN_reg(rd, cpu_src1);
3063 #ifdef TARGET_SPARC64
3064 } else if (xop == 0x25) { /* sll, V9 sllx */
3065 cpu_src1 = get_src1(insn, cpu_src1);
3066 if (IS_IMM) { /* immediate */
3067 simm = GET_FIELDs(insn, 20, 31);
3068 if (insn & (1 << 12)) {
3069 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3070 } else {
3071 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3073 } else { /* register */
3074 rs2 = GET_FIELD(insn, 27, 31);
3075 gen_movl_reg_TN(rs2, cpu_src2);
3076 if (insn & (1 << 12)) {
3077 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3078 } else {
3079 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3081 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3083 gen_movl_TN_reg(rd, cpu_dst);
3084 } else if (xop == 0x26) { /* srl, V9 srlx */
3085 cpu_src1 = get_src1(insn, cpu_src1);
3086 if (IS_IMM) { /* immediate */
3087 simm = GET_FIELDs(insn, 20, 31);
3088 if (insn & (1 << 12)) {
3089 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3090 } else {
3091 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3092 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3094 } else { /* register */
3095 rs2 = GET_FIELD(insn, 27, 31);
3096 gen_movl_reg_TN(rs2, cpu_src2);
3097 if (insn & (1 << 12)) {
3098 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3099 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3100 } else {
3101 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3102 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3103 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3106 gen_movl_TN_reg(rd, cpu_dst);
3107 } else if (xop == 0x27) { /* sra, V9 srax */
3108 cpu_src1 = get_src1(insn, cpu_src1);
3109 if (IS_IMM) { /* immediate */
3110 simm = GET_FIELDs(insn, 20, 31);
3111 if (insn & (1 << 12)) {
3112 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3113 } else {
3114 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3115 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3116 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3118 } else { /* register */
3119 rs2 = GET_FIELD(insn, 27, 31);
3120 gen_movl_reg_TN(rs2, cpu_src2);
3121 if (insn & (1 << 12)) {
3122 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3123 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3124 } else {
3125 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3126 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3127 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3128 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3131 gen_movl_TN_reg(rd, cpu_dst);
3132 #endif
3133 } else if (xop < 0x36) {
3134 if (xop < 0x20) {
3135 cpu_src1 = get_src1(insn, cpu_src1);
3136 cpu_src2 = get_src2(insn, cpu_src2);
3137 switch (xop & ~0x10) {
3138 case 0x0: /* add */
3139 if (IS_IMM) {
3140 simm = GET_FIELDs(insn, 19, 31);
3141 if (xop & 0x10) {
3142 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3143 } else {
3144 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3146 } else {
3147 if (xop & 0x10) {
3148 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3149 } else {
3150 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3153 break;
3154 case 0x1: /* and */
3155 if (IS_IMM) {
3156 simm = GET_FIELDs(insn, 19, 31);
3157 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3158 } else {
3159 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3161 if (xop & 0x10) {
3162 gen_op_logic_cc(cpu_dst);
3164 break;
3165 case 0x2: /* or */
3166 if (IS_IMM) {
3167 simm = GET_FIELDs(insn, 19, 31);
3168 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3169 } else {
3170 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3172 if (xop & 0x10)
3173 gen_op_logic_cc(cpu_dst);
3174 break;
3175 case 0x3: /* xor */
3176 if (IS_IMM) {
3177 simm = GET_FIELDs(insn, 19, 31);
3178 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3179 } else {
3180 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3182 if (xop & 0x10)
3183 gen_op_logic_cc(cpu_dst);
3184 break;
3185 case 0x4: /* sub */
3186 if (IS_IMM) {
3187 simm = GET_FIELDs(insn, 19, 31);
3188 if (xop & 0x10) {
3189 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3190 } else {
3191 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3193 } else {
3194 if (xop & 0x10) {
3195 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3196 } else {
3197 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3200 break;
3201 case 0x5: /* andn */
3202 if (IS_IMM) {
3203 simm = GET_FIELDs(insn, 19, 31);
3204 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3205 } else {
3206 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3208 if (xop & 0x10)
3209 gen_op_logic_cc(cpu_dst);
3210 break;
3211 case 0x6: /* orn */
3212 if (IS_IMM) {
3213 simm = GET_FIELDs(insn, 19, 31);
3214 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3215 } else {
3216 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3218 if (xop & 0x10)
3219 gen_op_logic_cc(cpu_dst);
3220 break;
3221 case 0x7: /* xorn */
3222 if (IS_IMM) {
3223 simm = GET_FIELDs(insn, 19, 31);
3224 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3225 } else {
3226 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3227 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3229 if (xop & 0x10)
3230 gen_op_logic_cc(cpu_dst);
3231 break;
3232 case 0x8: /* addx, V9 addc */
3233 if (IS_IMM) {
3234 simm = GET_FIELDs(insn, 19, 31);
3235 if (xop & 0x10)
3236 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3237 else {
3238 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3239 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3240 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3242 } else {
3243 if (xop & 0x10)
3244 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3245 else {
3246 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3247 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3248 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3251 break;
3252 #ifdef TARGET_SPARC64
3253 case 0x9: /* V9 mulx */
3254 if (IS_IMM) {
3255 simm = GET_FIELDs(insn, 19, 31);
3256 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3257 } else {
3258 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3260 break;
3261 #endif
3262 case 0xa: /* umul */
3263 CHECK_IU_FEATURE(dc, MUL);
3264 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3265 if (xop & 0x10)
3266 gen_op_logic_cc(cpu_dst);
3267 break;
3268 case 0xb: /* smul */
3269 CHECK_IU_FEATURE(dc, MUL);
3270 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3271 if (xop & 0x10)
3272 gen_op_logic_cc(cpu_dst);
3273 break;
3274 case 0xc: /* subx, V9 subc */
3275 if (IS_IMM) {
3276 simm = GET_FIELDs(insn, 19, 31);
3277 if (xop & 0x10) {
3278 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3279 } else {
3280 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3281 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3282 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3284 } else {
3285 if (xop & 0x10) {
3286 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3287 } else {
3288 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3289 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3290 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3293 break;
3294 #ifdef TARGET_SPARC64
3295 case 0xd: /* V9 udivx */
3296 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3297 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3298 gen_trap_ifdivzero_tl(cpu_cc_src2);
3299 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3300 break;
3301 #endif
3302 case 0xe: /* udiv */
3303 CHECK_IU_FEATURE(dc, DIV);
3304 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3305 if (xop & 0x10)
3306 gen_op_div_cc(cpu_dst);
3307 break;
3308 case 0xf: /* sdiv */
3309 CHECK_IU_FEATURE(dc, DIV);
3310 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3311 if (xop & 0x10)
3312 gen_op_div_cc(cpu_dst);
3313 break;
3314 default:
3315 goto illegal_insn;
3317 gen_movl_TN_reg(rd, cpu_dst);
3318 } else {
3319 cpu_src1 = get_src1(insn, cpu_src1);
3320 cpu_src2 = get_src2(insn, cpu_src2);
3321 switch (xop) {
3322 case 0x20: /* taddcc */
3323 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3324 gen_movl_TN_reg(rd, cpu_dst);
3325 break;
3326 case 0x21: /* tsubcc */
3327 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3328 gen_movl_TN_reg(rd, cpu_dst);
3329 break;
3330 case 0x22: /* taddcctv */
3331 save_state(dc, cpu_cond);
3332 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3333 gen_movl_TN_reg(rd, cpu_dst);
3334 break;
3335 case 0x23: /* tsubcctv */
3336 save_state(dc, cpu_cond);
3337 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3338 gen_movl_TN_reg(rd, cpu_dst);
3339 break;
3340 case 0x24: /* mulscc */
3341 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3342 gen_movl_TN_reg(rd, cpu_dst);
3343 break;
3344 #ifndef TARGET_SPARC64
3345 case 0x25: /* sll */
3346 if (IS_IMM) { /* immediate */
3347 simm = GET_FIELDs(insn, 20, 31);
3348 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3349 } else { /* register */
3350 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3351 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3353 gen_movl_TN_reg(rd, cpu_dst);
3354 break;
3355 case 0x26: /* srl */
3356 if (IS_IMM) { /* immediate */
3357 simm = GET_FIELDs(insn, 20, 31);
3358 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3359 } else { /* register */
3360 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3361 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3363 gen_movl_TN_reg(rd, cpu_dst);
3364 break;
3365 case 0x27: /* sra */
3366 if (IS_IMM) { /* immediate */
3367 simm = GET_FIELDs(insn, 20, 31);
3368 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3369 } else { /* register */
3370 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3371 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3373 gen_movl_TN_reg(rd, cpu_dst);
3374 break;
3375 #endif
3376 case 0x30:
3378 switch(rd) {
3379 case 0: /* wry */
3380 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3381 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3382 break;
3383 #ifndef TARGET_SPARC64
3384 case 0x01 ... 0x0f: /* undefined in the
3385 SPARCv8 manual, nop
3386 on the microSPARC
3387 II */
3388 case 0x10 ... 0x1f: /* implementation-dependent
3389 in the SPARCv8
3390 manual, nop on the
3391 microSPARC II */
3392 break;
3393 #else
3394 case 0x2: /* V9 wrccr */
3395 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3396 gen_helper_wrccr(cpu_dst);
3397 break;
3398 case 0x3: /* V9 wrasi */
3399 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3400 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3401 break;
3402 case 0x6: /* V9 wrfprs */
3403 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3404 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3405 save_state(dc, cpu_cond);
3406 gen_op_next_insn();
3407 tcg_gen_exit_tb(0);
3408 dc->is_br = 1;
3409 break;
3410 case 0xf: /* V9 sir, nop if user */
3411 #if !defined(CONFIG_USER_ONLY)
3412 if (supervisor(dc))
3413 ; // XXX
3414 #endif
3415 break;
3416 case 0x13: /* Graphics Status */
3417 if (gen_trap_ifnofpu(dc, cpu_cond))
3418 goto jmp_insn;
3419 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3420 break;
3421 case 0x14: /* Softint set */
3422 if (!supervisor(dc))
3423 goto illegal_insn;
3424 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3425 gen_helper_set_softint(cpu_tmp64);
3426 break;
3427 case 0x15: /* Softint clear */
3428 if (!supervisor(dc))
3429 goto illegal_insn;
3430 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3431 gen_helper_clear_softint(cpu_tmp64);
3432 break;
3433 case 0x16: /* Softint write */
3434 if (!supervisor(dc))
3435 goto illegal_insn;
3436 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3437 gen_helper_write_softint(cpu_tmp64);
3438 break;
3439 case 0x17: /* Tick compare */
3440 #if !defined(CONFIG_USER_ONLY)
3441 if (!supervisor(dc))
3442 goto illegal_insn;
3443 #endif
3445 TCGv_ptr r_tickptr;
3447 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3448 cpu_src2);
3449 r_tickptr = tcg_temp_new_ptr();
3450 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3451 offsetof(CPUState, tick));
3452 gen_helper_tick_set_limit(r_tickptr,
3453 cpu_tick_cmpr);
3454 tcg_temp_free_ptr(r_tickptr);
3456 break;
3457 case 0x18: /* System tick */
3458 #if !defined(CONFIG_USER_ONLY)
3459 if (!supervisor(dc))
3460 goto illegal_insn;
3461 #endif
3463 TCGv_ptr r_tickptr;
3465 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3466 cpu_src2);
3467 r_tickptr = tcg_temp_new_ptr();
3468 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3469 offsetof(CPUState, stick));
3470 gen_helper_tick_set_count(r_tickptr,
3471 cpu_dst);
3472 tcg_temp_free_ptr(r_tickptr);
3474 break;
3475 case 0x19: /* System tick compare */
3476 #if !defined(CONFIG_USER_ONLY)
3477 if (!supervisor(dc))
3478 goto illegal_insn;
3479 #endif
3481 TCGv_ptr r_tickptr;
3483 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3484 cpu_src2);
3485 r_tickptr = tcg_temp_new_ptr();
3486 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3487 offsetof(CPUState, stick));
3488 gen_helper_tick_set_limit(r_tickptr,
3489 cpu_stick_cmpr);
3490 tcg_temp_free_ptr(r_tickptr);
3492 break;
3494 case 0x10: /* Performance Control */
3495 case 0x11: /* Performance Instrumentation
3496 Counter */
3497 case 0x12: /* Dispatch Control */
3498 #endif
3499 default:
3500 goto illegal_insn;
3503 break;
3504 #if !defined(CONFIG_USER_ONLY)
3505 case 0x31: /* wrpsr, V9 saved, restored */
3507 if (!supervisor(dc))
3508 goto priv_insn;
3509 #ifdef TARGET_SPARC64
3510 switch (rd) {
3511 case 0:
3512 gen_helper_saved();
3513 break;
3514 case 1:
3515 gen_helper_restored();
3516 break;
3517 case 2: /* UA2005 allclean */
3518 case 3: /* UA2005 otherw */
3519 case 4: /* UA2005 normalw */
3520 case 5: /* UA2005 invalw */
3521 // XXX
3522 default:
3523 goto illegal_insn;
3525 #else
3526 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3527 gen_helper_wrpsr(cpu_dst);
3528 save_state(dc, cpu_cond);
3529 gen_op_next_insn();
3530 tcg_gen_exit_tb(0);
3531 dc->is_br = 1;
3532 #endif
3534 break;
3535 case 0x32: /* wrwim, V9 wrpr */
3537 if (!supervisor(dc))
3538 goto priv_insn;
3539 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3540 #ifdef TARGET_SPARC64
3541 switch (rd) {
3542 case 0: // tpc
3544 TCGv_ptr r_tsptr;
3546 r_tsptr = tcg_temp_new_ptr();
3547 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3548 offsetof(CPUState, tsptr));
3549 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3550 offsetof(trap_state, tpc));
3551 tcg_temp_free_ptr(r_tsptr);
3553 break;
3554 case 1: // tnpc
3556 TCGv_ptr r_tsptr;
3558 r_tsptr = tcg_temp_new_ptr();
3559 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3560 offsetof(CPUState, tsptr));
3561 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3562 offsetof(trap_state, tnpc));
3563 tcg_temp_free_ptr(r_tsptr);
3565 break;
3566 case 2: // tstate
3568 TCGv_ptr r_tsptr;
3570 r_tsptr = tcg_temp_new_ptr();
3571 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3572 offsetof(CPUState, tsptr));
3573 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3574 offsetof(trap_state,
3575 tstate));
3576 tcg_temp_free_ptr(r_tsptr);
3578 break;
3579 case 3: // tt
3581 TCGv_ptr r_tsptr;
3583 r_tsptr = tcg_temp_new_ptr();
3584 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3585 offsetof(CPUState, tsptr));
3586 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3587 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3588 offsetof(trap_state, tt));
3589 tcg_temp_free_ptr(r_tsptr);
3591 break;
3592 case 4: // tick
3594 TCGv_ptr r_tickptr;
3596 r_tickptr = tcg_temp_new_ptr();
3597 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3598 offsetof(CPUState, tick));
3599 gen_helper_tick_set_count(r_tickptr,
3600 cpu_tmp0);
3601 tcg_temp_free_ptr(r_tickptr);
3603 break;
3604 case 5: // tba
3605 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3606 break;
3607 case 6: // pstate
3608 save_state(dc, cpu_cond);
3609 gen_helper_wrpstate(cpu_tmp0);
3610 gen_op_next_insn();
3611 tcg_gen_exit_tb(0);
3612 dc->is_br = 1;
3613 break;
3614 case 7: // tl
3615 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3616 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3617 offsetof(CPUSPARCState, tl));
3618 break;
3619 case 8: // pil
3620 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3621 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3622 offsetof(CPUSPARCState,
3623 psrpil));
3624 break;
3625 case 9: // cwp
3626 gen_helper_wrcwp(cpu_tmp0);
3627 break;
3628 case 10: // cansave
3629 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3630 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3631 offsetof(CPUSPARCState,
3632 cansave));
3633 break;
3634 case 11: // canrestore
3635 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3636 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3637 offsetof(CPUSPARCState,
3638 canrestore));
3639 break;
3640 case 12: // cleanwin
3641 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3642 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3643 offsetof(CPUSPARCState,
3644 cleanwin));
3645 break;
3646 case 13: // otherwin
3647 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3648 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3649 offsetof(CPUSPARCState,
3650 otherwin));
3651 break;
3652 case 14: // wstate
3653 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3654 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3655 offsetof(CPUSPARCState,
3656 wstate));
3657 break;
3658 case 16: // UA2005 gl
3659 CHECK_IU_FEATURE(dc, GL);
3660 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3661 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3662 offsetof(CPUSPARCState, gl));
3663 break;
3664 case 26: // UA2005 strand status
3665 CHECK_IU_FEATURE(dc, HYPV);
3666 if (!hypervisor(dc))
3667 goto priv_insn;
3668 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3669 break;
3670 default:
3671 goto illegal_insn;
3673 #else
3674 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3675 if (dc->def->nwindows != 32)
3676 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3677 (1 << dc->def->nwindows) - 1);
3678 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3679 #endif
3681 break;
3682 case 0x33: /* wrtbr, UA2005 wrhpr */
3684 #ifndef TARGET_SPARC64
3685 if (!supervisor(dc))
3686 goto priv_insn;
3687 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3688 #else
3689 CHECK_IU_FEATURE(dc, HYPV);
3690 if (!hypervisor(dc))
3691 goto priv_insn;
3692 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3693 switch (rd) {
3694 case 0: // hpstate
3695 // XXX gen_op_wrhpstate();
3696 save_state(dc, cpu_cond);
3697 gen_op_next_insn();
3698 tcg_gen_exit_tb(0);
3699 dc->is_br = 1;
3700 break;
3701 case 1: // htstate
3702 // XXX gen_op_wrhtstate();
3703 break;
3704 case 3: // hintp
3705 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3706 break;
3707 case 5: // htba
3708 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3709 break;
3710 case 31: // hstick_cmpr
3712 TCGv_ptr r_tickptr;
3714 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3715 r_tickptr = tcg_temp_new_ptr();
3716 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3717 offsetof(CPUState, hstick));
3718 gen_helper_tick_set_limit(r_tickptr,
3719 cpu_hstick_cmpr);
3720 tcg_temp_free_ptr(r_tickptr);
3722 break;
3723 case 6: // hver readonly
3724 default:
3725 goto illegal_insn;
3727 #endif
3729 break;
3730 #endif
3731 #ifdef TARGET_SPARC64
3732 case 0x2c: /* V9 movcc */
3734 int cc = GET_FIELD_SP(insn, 11, 12);
3735 int cond = GET_FIELD_SP(insn, 14, 17);
3736 TCGv r_cond;
3737 int l1;
3739 r_cond = tcg_temp_new();
3740 if (insn & (1 << 18)) {
3741 if (cc == 0)
3742 gen_cond(r_cond, 0, cond);
3743 else if (cc == 2)
3744 gen_cond(r_cond, 1, cond);
3745 else
3746 goto illegal_insn;
3747 } else {
3748 gen_fcond(r_cond, cc, cond);
3751 l1 = gen_new_label();
3753 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3754 if (IS_IMM) { /* immediate */
3755 TCGv r_const;
3757 simm = GET_FIELD_SPs(insn, 0, 10);
3758 r_const = tcg_const_tl(simm);
3759 gen_movl_TN_reg(rd, r_const);
3760 tcg_temp_free(r_const);
3761 } else {
3762 rs2 = GET_FIELD_SP(insn, 0, 4);
3763 gen_movl_reg_TN(rs2, cpu_tmp0);
3764 gen_movl_TN_reg(rd, cpu_tmp0);
3766 gen_set_label(l1);
3767 tcg_temp_free(r_cond);
3768 break;
3770 case 0x2d: /* V9 sdivx */
3771 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3772 gen_movl_TN_reg(rd, cpu_dst);
3773 break;
3774 case 0x2e: /* V9 popc */
3776 cpu_src2 = get_src2(insn, cpu_src2);
3777 gen_helper_popc(cpu_dst, cpu_src2);
3778 gen_movl_TN_reg(rd, cpu_dst);
3780 case 0x2f: /* V9 movr */
3782 int cond = GET_FIELD_SP(insn, 10, 12);
3783 int l1;
3785 cpu_src1 = get_src1(insn, cpu_src1);
3787 l1 = gen_new_label();
3789 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3790 cpu_src1, 0, l1);
3791 if (IS_IMM) { /* immediate */
3792 TCGv r_const;
3794 simm = GET_FIELD_SPs(insn, 0, 9);
3795 r_const = tcg_const_tl(simm);
3796 gen_movl_TN_reg(rd, r_const);
3797 tcg_temp_free(r_const);
3798 } else {
3799 rs2 = GET_FIELD_SP(insn, 0, 4);
3800 gen_movl_reg_TN(rs2, cpu_tmp0);
3801 gen_movl_TN_reg(rd, cpu_tmp0);
3803 gen_set_label(l1);
3804 break;
3806 #endif
3807 default:
3808 goto illegal_insn;
3811 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3812 #ifdef TARGET_SPARC64
3813 int opf = GET_FIELD_SP(insn, 5, 13);
3814 rs1 = GET_FIELD(insn, 13, 17);
3815 rs2 = GET_FIELD(insn, 27, 31);
3816 if (gen_trap_ifnofpu(dc, cpu_cond))
3817 goto jmp_insn;
3819 switch (opf) {
3820 case 0x000: /* VIS I edge8cc */
3821 case 0x001: /* VIS II edge8n */
3822 case 0x002: /* VIS I edge8lcc */
3823 case 0x003: /* VIS II edge8ln */
3824 case 0x004: /* VIS I edge16cc */
3825 case 0x005: /* VIS II edge16n */
3826 case 0x006: /* VIS I edge16lcc */
3827 case 0x007: /* VIS II edge16ln */
3828 case 0x008: /* VIS I edge32cc */
3829 case 0x009: /* VIS II edge32n */
3830 case 0x00a: /* VIS I edge32lcc */
3831 case 0x00b: /* VIS II edge32ln */
3832 // XXX
3833 goto illegal_insn;
3834 case 0x010: /* VIS I array8 */
3835 CHECK_FPU_FEATURE(dc, VIS1);
3836 cpu_src1 = get_src1(insn, cpu_src1);
3837 gen_movl_reg_TN(rs2, cpu_src2);
3838 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3839 gen_movl_TN_reg(rd, cpu_dst);
3840 break;
3841 case 0x012: /* VIS I array16 */
3842 CHECK_FPU_FEATURE(dc, VIS1);
3843 cpu_src1 = get_src1(insn, cpu_src1);
3844 gen_movl_reg_TN(rs2, cpu_src2);
3845 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3846 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3847 gen_movl_TN_reg(rd, cpu_dst);
3848 break;
3849 case 0x014: /* VIS I array32 */
3850 CHECK_FPU_FEATURE(dc, VIS1);
3851 cpu_src1 = get_src1(insn, cpu_src1);
3852 gen_movl_reg_TN(rs2, cpu_src2);
3853 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3854 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3855 gen_movl_TN_reg(rd, cpu_dst);
3856 break;
3857 case 0x018: /* VIS I alignaddr */
3858 CHECK_FPU_FEATURE(dc, VIS1);
3859 cpu_src1 = get_src1(insn, cpu_src1);
3860 gen_movl_reg_TN(rs2, cpu_src2);
3861 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3862 gen_movl_TN_reg(rd, cpu_dst);
3863 break;
3864 case 0x019: /* VIS II bmask */
3865 case 0x01a: /* VIS I alignaddrl */
3866 // XXX
3867 goto illegal_insn;
3868 case 0x020: /* VIS I fcmple16 */
3869 CHECK_FPU_FEATURE(dc, VIS1);
3870 gen_op_load_fpr_DT0(DFPREG(rs1));
3871 gen_op_load_fpr_DT1(DFPREG(rs2));
3872 gen_helper_fcmple16();
3873 gen_op_store_DT0_fpr(DFPREG(rd));
3874 break;
3875 case 0x022: /* VIS I fcmpne16 */
3876 CHECK_FPU_FEATURE(dc, VIS1);
3877 gen_op_load_fpr_DT0(DFPREG(rs1));
3878 gen_op_load_fpr_DT1(DFPREG(rs2));
3879 gen_helper_fcmpne16();
3880 gen_op_store_DT0_fpr(DFPREG(rd));
3881 break;
3882 case 0x024: /* VIS I fcmple32 */
3883 CHECK_FPU_FEATURE(dc, VIS1);
3884 gen_op_load_fpr_DT0(DFPREG(rs1));
3885 gen_op_load_fpr_DT1(DFPREG(rs2));
3886 gen_helper_fcmple32();
3887 gen_op_store_DT0_fpr(DFPREG(rd));
3888 break;
3889 case 0x026: /* VIS I fcmpne32 */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 gen_op_load_fpr_DT0(DFPREG(rs1));
3892 gen_op_load_fpr_DT1(DFPREG(rs2));
3893 gen_helper_fcmpne32();
3894 gen_op_store_DT0_fpr(DFPREG(rd));
3895 break;
3896 case 0x028: /* VIS I fcmpgt16 */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_op_load_fpr_DT0(DFPREG(rs1));
3899 gen_op_load_fpr_DT1(DFPREG(rs2));
3900 gen_helper_fcmpgt16();
3901 gen_op_store_DT0_fpr(DFPREG(rd));
3902 break;
3903 case 0x02a: /* VIS I fcmpeq16 */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 gen_op_load_fpr_DT0(DFPREG(rs1));
3906 gen_op_load_fpr_DT1(DFPREG(rs2));
3907 gen_helper_fcmpeq16();
3908 gen_op_store_DT0_fpr(DFPREG(rd));
3909 break;
3910 case 0x02c: /* VIS I fcmpgt32 */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 gen_op_load_fpr_DT0(DFPREG(rs1));
3913 gen_op_load_fpr_DT1(DFPREG(rs2));
3914 gen_helper_fcmpgt32();
3915 gen_op_store_DT0_fpr(DFPREG(rd));
3916 break;
3917 case 0x02e: /* VIS I fcmpeq32 */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 gen_op_load_fpr_DT0(DFPREG(rs1));
3920 gen_op_load_fpr_DT1(DFPREG(rs2));
3921 gen_helper_fcmpeq32();
3922 gen_op_store_DT0_fpr(DFPREG(rd));
3923 break;
3924 case 0x031: /* VIS I fmul8x16 */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 gen_op_load_fpr_DT0(DFPREG(rs1));
3927 gen_op_load_fpr_DT1(DFPREG(rs2));
3928 gen_helper_fmul8x16();
3929 gen_op_store_DT0_fpr(DFPREG(rd));
3930 break;
3931 case 0x033: /* VIS I fmul8x16au */
3932 CHECK_FPU_FEATURE(dc, VIS1);
3933 gen_op_load_fpr_DT0(DFPREG(rs1));
3934 gen_op_load_fpr_DT1(DFPREG(rs2));
3935 gen_helper_fmul8x16au();
3936 gen_op_store_DT0_fpr(DFPREG(rd));
3937 break;
3938 case 0x035: /* VIS I fmul8x16al */
3939 CHECK_FPU_FEATURE(dc, VIS1);
3940 gen_op_load_fpr_DT0(DFPREG(rs1));
3941 gen_op_load_fpr_DT1(DFPREG(rs2));
3942 gen_helper_fmul8x16al();
3943 gen_op_store_DT0_fpr(DFPREG(rd));
3944 break;
3945 case 0x036: /* VIS I fmul8sux16 */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 gen_op_load_fpr_DT0(DFPREG(rs1));
3948 gen_op_load_fpr_DT1(DFPREG(rs2));
3949 gen_helper_fmul8sux16();
3950 gen_op_store_DT0_fpr(DFPREG(rd));
3951 break;
3952 case 0x037: /* VIS I fmul8ulx16 */
3953 CHECK_FPU_FEATURE(dc, VIS1);
3954 gen_op_load_fpr_DT0(DFPREG(rs1));
3955 gen_op_load_fpr_DT1(DFPREG(rs2));
3956 gen_helper_fmul8ulx16();
3957 gen_op_store_DT0_fpr(DFPREG(rd));
3958 break;
3959 case 0x038: /* VIS I fmuld8sux16 */
3960 CHECK_FPU_FEATURE(dc, VIS1);
3961 gen_op_load_fpr_DT0(DFPREG(rs1));
3962 gen_op_load_fpr_DT1(DFPREG(rs2));
3963 gen_helper_fmuld8sux16();
3964 gen_op_store_DT0_fpr(DFPREG(rd));
3965 break;
3966 case 0x039: /* VIS I fmuld8ulx16 */
3967 CHECK_FPU_FEATURE(dc, VIS1);
3968 gen_op_load_fpr_DT0(DFPREG(rs1));
3969 gen_op_load_fpr_DT1(DFPREG(rs2));
3970 gen_helper_fmuld8ulx16();
3971 gen_op_store_DT0_fpr(DFPREG(rd));
3972 break;
3973 case 0x03a: /* VIS I fpack32 */
3974 case 0x03b: /* VIS I fpack16 */
3975 case 0x03d: /* VIS I fpackfix */
3976 case 0x03e: /* VIS I pdist */
3977 // XXX
3978 goto illegal_insn;
3979 case 0x048: /* VIS I faligndata */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 gen_op_load_fpr_DT0(DFPREG(rs1));
3982 gen_op_load_fpr_DT1(DFPREG(rs2));
3983 gen_helper_faligndata();
3984 gen_op_store_DT0_fpr(DFPREG(rd));
3985 break;
3986 case 0x04b: /* VIS I fpmerge */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 gen_op_load_fpr_DT0(DFPREG(rs1));
3989 gen_op_load_fpr_DT1(DFPREG(rs2));
3990 gen_helper_fpmerge();
3991 gen_op_store_DT0_fpr(DFPREG(rd));
3992 break;
3993 case 0x04c: /* VIS II bshuffle */
3994 // XXX
3995 goto illegal_insn;
3996 case 0x04d: /* VIS I fexpand */
3997 CHECK_FPU_FEATURE(dc, VIS1);
3998 gen_op_load_fpr_DT0(DFPREG(rs1));
3999 gen_op_load_fpr_DT1(DFPREG(rs2));
4000 gen_helper_fexpand();
4001 gen_op_store_DT0_fpr(DFPREG(rd));
4002 break;
4003 case 0x050: /* VIS I fpadd16 */
4004 CHECK_FPU_FEATURE(dc, VIS1);
4005 gen_op_load_fpr_DT0(DFPREG(rs1));
4006 gen_op_load_fpr_DT1(DFPREG(rs2));
4007 gen_helper_fpadd16();
4008 gen_op_store_DT0_fpr(DFPREG(rd));
4009 break;
4010 case 0x051: /* VIS I fpadd16s */
4011 CHECK_FPU_FEATURE(dc, VIS1);
4012 gen_helper_fpadd16s(cpu_fpr[rd],
4013 cpu_fpr[rs1], cpu_fpr[rs2]);
4014 break;
4015 case 0x052: /* VIS I fpadd32 */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 gen_op_load_fpr_DT0(DFPREG(rs1));
4018 gen_op_load_fpr_DT1(DFPREG(rs2));
4019 gen_helper_fpadd32();
4020 gen_op_store_DT0_fpr(DFPREG(rd));
4021 break;
4022 case 0x053: /* VIS I fpadd32s */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 gen_helper_fpadd32s(cpu_fpr[rd],
4025 cpu_fpr[rs1], cpu_fpr[rs2]);
4026 break;
4027 case 0x054: /* VIS I fpsub16 */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 gen_op_load_fpr_DT0(DFPREG(rs1));
4030 gen_op_load_fpr_DT1(DFPREG(rs2));
4031 gen_helper_fpsub16();
4032 gen_op_store_DT0_fpr(DFPREG(rd));
4033 break;
4034 case 0x055: /* VIS I fpsub16s */
4035 CHECK_FPU_FEATURE(dc, VIS1);
4036 gen_helper_fpsub16s(cpu_fpr[rd],
4037 cpu_fpr[rs1], cpu_fpr[rs2]);
4038 break;
4039 case 0x056: /* VIS I fpsub32 */
4040 CHECK_FPU_FEATURE(dc, VIS1);
4041 gen_op_load_fpr_DT0(DFPREG(rs1));
4042 gen_op_load_fpr_DT1(DFPREG(rs2));
4043 gen_helper_fpsub32();
4044 gen_op_store_DT0_fpr(DFPREG(rd));
4045 break;
4046 case 0x057: /* VIS I fpsub32s */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 gen_helper_fpsub32s(cpu_fpr[rd],
4049 cpu_fpr[rs1], cpu_fpr[rs2]);
4050 break;
4051 case 0x060: /* VIS I fzero */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4054 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4055 break;
4056 case 0x061: /* VIS I fzeros */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4059 break;
4060 case 0x062: /* VIS I fnor */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4063 cpu_fpr[DFPREG(rs2)]);
4064 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4065 cpu_fpr[DFPREG(rs2) + 1]);
4066 break;
4067 case 0x063: /* VIS I fnors */
4068 CHECK_FPU_FEATURE(dc, VIS1);
4069 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4070 break;
4071 case 0x064: /* VIS I fandnot2 */
4072 CHECK_FPU_FEATURE(dc, VIS1);
4073 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4074 cpu_fpr[DFPREG(rs2)]);
4075 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4076 cpu_fpr[DFPREG(rs1) + 1],
4077 cpu_fpr[DFPREG(rs2) + 1]);
4078 break;
4079 case 0x065: /* VIS I fandnot2s */
4080 CHECK_FPU_FEATURE(dc, VIS1);
4081 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4082 break;
4083 case 0x066: /* VIS I fnot2 */
4084 CHECK_FPU_FEATURE(dc, VIS1);
4085 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4086 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4087 cpu_fpr[DFPREG(rs2) + 1]);
4088 break;
4089 case 0x067: /* VIS I fnot2s */
4090 CHECK_FPU_FEATURE(dc, VIS1);
4091 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4092 break;
4093 case 0x068: /* VIS I fandnot1 */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4096 cpu_fpr[DFPREG(rs1)]);
4097 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4098 cpu_fpr[DFPREG(rs2) + 1],
4099 cpu_fpr[DFPREG(rs1) + 1]);
4100 break;
4101 case 0x069: /* VIS I fandnot1s */
4102 CHECK_FPU_FEATURE(dc, VIS1);
4103 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4104 break;
4105 case 0x06a: /* VIS I fnot1 */
4106 CHECK_FPU_FEATURE(dc, VIS1);
4107 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4108 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4109 cpu_fpr[DFPREG(rs1) + 1]);
4110 break;
4111 case 0x06b: /* VIS I fnot1s */
4112 CHECK_FPU_FEATURE(dc, VIS1);
4113 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4114 break;
4115 case 0x06c: /* VIS I fxor */
4116 CHECK_FPU_FEATURE(dc, VIS1);
4117 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4118 cpu_fpr[DFPREG(rs2)]);
4119 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4120 cpu_fpr[DFPREG(rs1) + 1],
4121 cpu_fpr[DFPREG(rs2) + 1]);
4122 break;
4123 case 0x06d: /* VIS I fxors */
4124 CHECK_FPU_FEATURE(dc, VIS1);
4125 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4126 break;
4127 case 0x06e: /* VIS I fnand */
4128 CHECK_FPU_FEATURE(dc, VIS1);
4129 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4130 cpu_fpr[DFPREG(rs2)]);
4131 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4132 cpu_fpr[DFPREG(rs2) + 1]);
4133 break;
4134 case 0x06f: /* VIS I fnands */
4135 CHECK_FPU_FEATURE(dc, VIS1);
4136 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4137 break;
4138 case 0x070: /* VIS I fand */
4139 CHECK_FPU_FEATURE(dc, VIS1);
4140 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4141 cpu_fpr[DFPREG(rs2)]);
4142 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4143 cpu_fpr[DFPREG(rs1) + 1],
4144 cpu_fpr[DFPREG(rs2) + 1]);
4145 break;
4146 case 0x071: /* VIS I fands */
4147 CHECK_FPU_FEATURE(dc, VIS1);
4148 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4149 break;
4150 case 0x072: /* VIS I fxnor */
4151 CHECK_FPU_FEATURE(dc, VIS1);
4152 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4153 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4154 cpu_fpr[DFPREG(rs1)]);
4155 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4156 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4157 cpu_fpr[DFPREG(rs1) + 1]);
4158 break;
4159 case 0x073: /* VIS I fxnors */
4160 CHECK_FPU_FEATURE(dc, VIS1);
4161 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4162 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4163 break;
4164 case 0x074: /* VIS I fsrc1 */
4165 CHECK_FPU_FEATURE(dc, VIS1);
4166 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4167 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4168 cpu_fpr[DFPREG(rs1) + 1]);
4169 break;
4170 case 0x075: /* VIS I fsrc1s */
4171 CHECK_FPU_FEATURE(dc, VIS1);
4172 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4173 break;
4174 case 0x076: /* VIS I fornot2 */
4175 CHECK_FPU_FEATURE(dc, VIS1);
4176 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4177 cpu_fpr[DFPREG(rs2)]);
4178 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4179 cpu_fpr[DFPREG(rs1) + 1],
4180 cpu_fpr[DFPREG(rs2) + 1]);
4181 break;
4182 case 0x077: /* VIS I fornot2s */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4185 break;
4186 case 0x078: /* VIS I fsrc2 */
4187 CHECK_FPU_FEATURE(dc, VIS1);
4188 gen_op_load_fpr_DT0(DFPREG(rs2));
4189 gen_op_store_DT0_fpr(DFPREG(rd));
4190 break;
4191 case 0x079: /* VIS I fsrc2s */
4192 CHECK_FPU_FEATURE(dc, VIS1);
4193 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4194 break;
4195 case 0x07a: /* VIS I fornot1 */
4196 CHECK_FPU_FEATURE(dc, VIS1);
4197 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4198 cpu_fpr[DFPREG(rs1)]);
4199 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4200 cpu_fpr[DFPREG(rs2) + 1],
4201 cpu_fpr[DFPREG(rs1) + 1]);
4202 break;
4203 case 0x07b: /* VIS I fornot1s */
4204 CHECK_FPU_FEATURE(dc, VIS1);
4205 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4206 break;
4207 case 0x07c: /* VIS I for */
4208 CHECK_FPU_FEATURE(dc, VIS1);
4209 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4210 cpu_fpr[DFPREG(rs2)]);
4211 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4212 cpu_fpr[DFPREG(rs1) + 1],
4213 cpu_fpr[DFPREG(rs2) + 1]);
4214 break;
4215 case 0x07d: /* VIS I fors */
4216 CHECK_FPU_FEATURE(dc, VIS1);
4217 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4218 break;
4219 case 0x07e: /* VIS I fone */
4220 CHECK_FPU_FEATURE(dc, VIS1);
4221 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4222 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4223 break;
4224 case 0x07f: /* VIS I fones */
4225 CHECK_FPU_FEATURE(dc, VIS1);
4226 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4227 break;
4228 case 0x080: /* VIS I shutdown */
4229 case 0x081: /* VIS II siam */
4230 // XXX
4231 goto illegal_insn;
4232 default:
4233 goto illegal_insn;
4235 #else
4236 goto ncp_insn;
4237 #endif
4238 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4239 #ifdef TARGET_SPARC64
4240 goto illegal_insn;
4241 #else
4242 goto ncp_insn;
4243 #endif
4244 #ifdef TARGET_SPARC64
4245 } else if (xop == 0x39) { /* V9 return */
4246 TCGv_i32 r_const;
4248 save_state(dc, cpu_cond);
4249 cpu_src1 = get_src1(insn, cpu_src1);
4250 if (IS_IMM) { /* immediate */
4251 simm = GET_FIELDs(insn, 19, 31);
4252 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4253 } else { /* register */
4254 rs2 = GET_FIELD(insn, 27, 31);
4255 if (rs2) {
4256 gen_movl_reg_TN(rs2, cpu_src2);
4257 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4258 } else
4259 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4261 gen_helper_restore();
4262 gen_mov_pc_npc(dc, cpu_cond);
4263 r_const = tcg_const_i32(3);
4264 gen_helper_check_align(cpu_dst, r_const);
4265 tcg_temp_free_i32(r_const);
4266 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4267 dc->npc = DYNAMIC_PC;
4268 goto jmp_insn;
4269 #endif
4270 } else {
4271 cpu_src1 = get_src1(insn, cpu_src1);
4272 if (IS_IMM) { /* immediate */
4273 simm = GET_FIELDs(insn, 19, 31);
4274 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4275 } else { /* register */
4276 rs2 = GET_FIELD(insn, 27, 31);
4277 if (rs2) {
4278 gen_movl_reg_TN(rs2, cpu_src2);
4279 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4280 } else
4281 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4283 switch (xop) {
4284 case 0x38: /* jmpl */
4286 TCGv r_pc;
4287 TCGv_i32 r_const;
4289 r_pc = tcg_const_tl(dc->pc);
4290 gen_movl_TN_reg(rd, r_pc);
4291 tcg_temp_free(r_pc);
4292 gen_mov_pc_npc(dc, cpu_cond);
4293 r_const = tcg_const_i32(3);
4294 gen_helper_check_align(cpu_dst, r_const);
4295 tcg_temp_free_i32(r_const);
4296 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4297 dc->npc = DYNAMIC_PC;
4299 goto jmp_insn;
4300 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4301 case 0x39: /* rett, V9 return */
4303 TCGv_i32 r_const;
4305 if (!supervisor(dc))
4306 goto priv_insn;
4307 gen_mov_pc_npc(dc, cpu_cond);
4308 r_const = tcg_const_i32(3);
4309 gen_helper_check_align(cpu_dst, r_const);
4310 tcg_temp_free_i32(r_const);
4311 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4312 dc->npc = DYNAMIC_PC;
4313 gen_helper_rett();
4315 goto jmp_insn;
4316 #endif
4317 case 0x3b: /* flush */
4318 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4319 goto unimp_flush;
4320 gen_helper_flush(cpu_dst);
4321 break;
4322 case 0x3c: /* save */
4323 save_state(dc, cpu_cond);
4324 gen_helper_save();
4325 gen_movl_TN_reg(rd, cpu_dst);
4326 break;
4327 case 0x3d: /* restore */
4328 save_state(dc, cpu_cond);
4329 gen_helper_restore();
4330 gen_movl_TN_reg(rd, cpu_dst);
4331 break;
4332 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4333 case 0x3e: /* V9 done/retry */
4335 switch (rd) {
4336 case 0:
4337 if (!supervisor(dc))
4338 goto priv_insn;
4339 dc->npc = DYNAMIC_PC;
4340 dc->pc = DYNAMIC_PC;
4341 gen_helper_done();
4342 goto jmp_insn;
4343 case 1:
4344 if (!supervisor(dc))
4345 goto priv_insn;
4346 dc->npc = DYNAMIC_PC;
4347 dc->pc = DYNAMIC_PC;
4348 gen_helper_retry();
4349 goto jmp_insn;
4350 default:
4351 goto illegal_insn;
4354 break;
4355 #endif
4356 default:
4357 goto illegal_insn;
4360 break;
4362 break;
4363 case 3: /* load/store instructions */
4365 unsigned int xop = GET_FIELD(insn, 7, 12);
4367 cpu_src1 = get_src1(insn, cpu_src1);
4368 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4369 rs2 = GET_FIELD(insn, 27, 31);
4370 gen_movl_reg_TN(rs2, cpu_src2);
4371 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4372 } else if (IS_IMM) { /* immediate */
4373 simm = GET_FIELDs(insn, 19, 31);
4374 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4375 } else { /* register */
4376 rs2 = GET_FIELD(insn, 27, 31);
4377 if (rs2 != 0) {
4378 gen_movl_reg_TN(rs2, cpu_src2);
4379 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4380 } else
4381 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4383 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4384 (xop > 0x17 && xop <= 0x1d ) ||
4385 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4386 switch (xop) {
4387 case 0x0: /* ld, V9 lduw, load unsigned word */
4388 gen_address_mask(dc, cpu_addr);
4389 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4390 break;
4391 case 0x1: /* ldub, load unsigned byte */
4392 gen_address_mask(dc, cpu_addr);
4393 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4394 break;
4395 case 0x2: /* lduh, load unsigned halfword */
4396 gen_address_mask(dc, cpu_addr);
4397 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4398 break;
4399 case 0x3: /* ldd, load double word */
4400 if (rd & 1)
4401 goto illegal_insn;
4402 else {
4403 TCGv_i32 r_const;
4405 save_state(dc, cpu_cond);
4406 r_const = tcg_const_i32(7);
4407 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4408 tcg_temp_free_i32(r_const);
4409 gen_address_mask(dc, cpu_addr);
4410 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4411 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4412 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4413 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4414 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4415 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4416 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4418 break;
4419 case 0x9: /* ldsb, load signed byte */
4420 gen_address_mask(dc, cpu_addr);
4421 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4422 break;
4423 case 0xa: /* ldsh, load signed halfword */
4424 gen_address_mask(dc, cpu_addr);
4425 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4426 break;
4427 case 0xd: /* ldstub -- XXX: should be atomically */
4429 TCGv r_const;
4431 gen_address_mask(dc, cpu_addr);
4432 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4433 r_const = tcg_const_tl(0xff);
4434 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4435 tcg_temp_free(r_const);
4437 break;
4438 case 0x0f: /* swap, swap register with memory. Also
4439 atomically */
4440 CHECK_IU_FEATURE(dc, SWAP);
4441 gen_movl_reg_TN(rd, cpu_val);
4442 gen_address_mask(dc, cpu_addr);
4443 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4444 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4445 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4446 break;
4447 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4448 case 0x10: /* lda, V9 lduwa, load word alternate */
4449 #ifndef TARGET_SPARC64
4450 if (IS_IMM)
4451 goto illegal_insn;
4452 if (!supervisor(dc))
4453 goto priv_insn;
4454 #endif
4455 save_state(dc, cpu_cond);
4456 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4457 break;
4458 case 0x11: /* lduba, load unsigned byte alternate */
4459 #ifndef TARGET_SPARC64
4460 if (IS_IMM)
4461 goto illegal_insn;
4462 if (!supervisor(dc))
4463 goto priv_insn;
4464 #endif
4465 save_state(dc, cpu_cond);
4466 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4467 break;
4468 case 0x12: /* lduha, load unsigned halfword alternate */
4469 #ifndef TARGET_SPARC64
4470 if (IS_IMM)
4471 goto illegal_insn;
4472 if (!supervisor(dc))
4473 goto priv_insn;
4474 #endif
4475 save_state(dc, cpu_cond);
4476 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4477 break;
4478 case 0x13: /* ldda, load double word alternate */
4479 #ifndef TARGET_SPARC64
4480 if (IS_IMM)
4481 goto illegal_insn;
4482 if (!supervisor(dc))
4483 goto priv_insn;
4484 #endif
4485 if (rd & 1)
4486 goto illegal_insn;
4487 save_state(dc, cpu_cond);
4488 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4489 goto skip_move;
4490 case 0x19: /* ldsba, load signed byte alternate */
4491 #ifndef TARGET_SPARC64
4492 if (IS_IMM)
4493 goto illegal_insn;
4494 if (!supervisor(dc))
4495 goto priv_insn;
4496 #endif
4497 save_state(dc, cpu_cond);
4498 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4499 break;
4500 case 0x1a: /* ldsha, load signed halfword alternate */
4501 #ifndef TARGET_SPARC64
4502 if (IS_IMM)
4503 goto illegal_insn;
4504 if (!supervisor(dc))
4505 goto priv_insn;
4506 #endif
4507 save_state(dc, cpu_cond);
4508 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4509 break;
4510 case 0x1d: /* ldstuba -- XXX: should be atomically */
4511 #ifndef TARGET_SPARC64
4512 if (IS_IMM)
4513 goto illegal_insn;
4514 if (!supervisor(dc))
4515 goto priv_insn;
4516 #endif
4517 save_state(dc, cpu_cond);
4518 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4519 break;
4520 case 0x1f: /* swapa, swap reg with alt. memory. Also
4521 atomically */
4522 CHECK_IU_FEATURE(dc, SWAP);
4523 #ifndef TARGET_SPARC64
4524 if (IS_IMM)
4525 goto illegal_insn;
4526 if (!supervisor(dc))
4527 goto priv_insn;
4528 #endif
4529 save_state(dc, cpu_cond);
4530 gen_movl_reg_TN(rd, cpu_val);
4531 gen_swap_asi(cpu_val, cpu_addr, insn);
4532 break;
4534 #ifndef TARGET_SPARC64
4535 case 0x30: /* ldc */
4536 case 0x31: /* ldcsr */
4537 case 0x33: /* lddc */
4538 goto ncp_insn;
4539 #endif
4540 #endif
4541 #ifdef TARGET_SPARC64
4542 case 0x08: /* V9 ldsw */
4543 gen_address_mask(dc, cpu_addr);
4544 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4545 break;
4546 case 0x0b: /* V9 ldx */
4547 gen_address_mask(dc, cpu_addr);
4548 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4549 break;
4550 case 0x18: /* V9 ldswa */
4551 save_state(dc, cpu_cond);
4552 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4553 break;
4554 case 0x1b: /* V9 ldxa */
4555 save_state(dc, cpu_cond);
4556 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4557 break;
4558 case 0x2d: /* V9 prefetch, no effect */
4559 goto skip_move;
4560 case 0x30: /* V9 ldfa */
4561 save_state(dc, cpu_cond);
4562 gen_ldf_asi(cpu_addr, insn, 4, rd);
4563 goto skip_move;
4564 case 0x33: /* V9 lddfa */
4565 save_state(dc, cpu_cond);
4566 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4567 goto skip_move;
4568 case 0x3d: /* V9 prefetcha, no effect */
4569 goto skip_move;
4570 case 0x32: /* V9 ldqfa */
4571 CHECK_FPU_FEATURE(dc, FLOAT128);
4572 save_state(dc, cpu_cond);
4573 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4574 goto skip_move;
4575 #endif
4576 default:
4577 goto illegal_insn;
4579 gen_movl_TN_reg(rd, cpu_val);
4580 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4581 skip_move: ;
4582 #endif
4583 } else if (xop >= 0x20 && xop < 0x24) {
4584 if (gen_trap_ifnofpu(dc, cpu_cond))
4585 goto jmp_insn;
4586 save_state(dc, cpu_cond);
4587 switch (xop) {
4588 case 0x20: /* ldf, load fpreg */
4589 gen_address_mask(dc, cpu_addr);
4590 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4591 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4592 break;
4593 case 0x21: /* ldfsr, V9 ldxfsr */
4594 #ifdef TARGET_SPARC64
4595 gen_address_mask(dc, cpu_addr);
4596 if (rd == 1) {
4597 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4598 gen_helper_ldxfsr(cpu_tmp64);
4599 } else
4600 #else
4602 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4603 gen_helper_ldfsr(cpu_tmp32);
4605 #endif
4606 break;
4607 case 0x22: /* ldqf, load quad fpreg */
4609 TCGv_i32 r_const;
4611 CHECK_FPU_FEATURE(dc, FLOAT128);
4612 r_const = tcg_const_i32(dc->mem_idx);
4613 gen_helper_ldqf(cpu_addr, r_const);
4614 tcg_temp_free_i32(r_const);
4615 gen_op_store_QT0_fpr(QFPREG(rd));
4617 break;
4618 case 0x23: /* lddf, load double fpreg */
4620 TCGv_i32 r_const;
4622 r_const = tcg_const_i32(dc->mem_idx);
4623 gen_helper_lddf(cpu_addr, r_const);
4624 tcg_temp_free_i32(r_const);
4625 gen_op_store_DT0_fpr(DFPREG(rd));
4627 break;
4628 default:
4629 goto illegal_insn;
4631 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4632 xop == 0xe || xop == 0x1e) {
4633 gen_movl_reg_TN(rd, cpu_val);
4634 switch (xop) {
4635 case 0x4: /* st, store word */
4636 gen_address_mask(dc, cpu_addr);
4637 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4638 break;
4639 case 0x5: /* stb, store byte */
4640 gen_address_mask(dc, cpu_addr);
4641 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4642 break;
4643 case 0x6: /* sth, store halfword */
4644 gen_address_mask(dc, cpu_addr);
4645 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4646 break;
4647 case 0x7: /* std, store double word */
4648 if (rd & 1)
4649 goto illegal_insn;
4650 else {
4651 TCGv_i32 r_const;
4653 save_state(dc, cpu_cond);
4654 gen_address_mask(dc, cpu_addr);
4655 r_const = tcg_const_i32(7);
4656 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4657 tcg_temp_free_i32(r_const);
4658 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4659 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4660 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4662 break;
4663 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4664 case 0x14: /* sta, V9 stwa, store word alternate */
4665 #ifndef TARGET_SPARC64
4666 if (IS_IMM)
4667 goto illegal_insn;
4668 if (!supervisor(dc))
4669 goto priv_insn;
4670 #endif
4671 save_state(dc, cpu_cond);
4672 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4673 break;
4674 case 0x15: /* stba, store byte alternate */
4675 #ifndef TARGET_SPARC64
4676 if (IS_IMM)
4677 goto illegal_insn;
4678 if (!supervisor(dc))
4679 goto priv_insn;
4680 #endif
4681 save_state(dc, cpu_cond);
4682 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4683 break;
4684 case 0x16: /* stha, store halfword alternate */
4685 #ifndef TARGET_SPARC64
4686 if (IS_IMM)
4687 goto illegal_insn;
4688 if (!supervisor(dc))
4689 goto priv_insn;
4690 #endif
4691 save_state(dc, cpu_cond);
4692 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4693 break;
4694 case 0x17: /* stda, store double word alternate */
4695 #ifndef TARGET_SPARC64
4696 if (IS_IMM)
4697 goto illegal_insn;
4698 if (!supervisor(dc))
4699 goto priv_insn;
4700 #endif
4701 if (rd & 1)
4702 goto illegal_insn;
4703 else {
4704 save_state(dc, cpu_cond);
4705 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4707 break;
4708 #endif
4709 #ifdef TARGET_SPARC64
4710 case 0x0e: /* V9 stx */
4711 gen_address_mask(dc, cpu_addr);
4712 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4713 break;
4714 case 0x1e: /* V9 stxa */
4715 save_state(dc, cpu_cond);
4716 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4717 break;
4718 #endif
4719 default:
4720 goto illegal_insn;
4722 } else if (xop > 0x23 && xop < 0x28) {
4723 if (gen_trap_ifnofpu(dc, cpu_cond))
4724 goto jmp_insn;
4725 save_state(dc, cpu_cond);
4726 switch (xop) {
4727 case 0x24: /* stf, store fpreg */
4728 gen_address_mask(dc, cpu_addr);
4729 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4730 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4731 break;
4732 case 0x25: /* stfsr, V9 stxfsr */
4733 #ifdef TARGET_SPARC64
4734 gen_address_mask(dc, cpu_addr);
4735 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4736 if (rd == 1)
4737 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4738 else
4739 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4740 #else
4741 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4742 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4743 #endif
4744 break;
4745 case 0x26:
4746 #ifdef TARGET_SPARC64
4747 /* V9 stqf, store quad fpreg */
4749 TCGv_i32 r_const;
4751 CHECK_FPU_FEATURE(dc, FLOAT128);
4752 gen_op_load_fpr_QT0(QFPREG(rd));
4753 r_const = tcg_const_i32(dc->mem_idx);
4754 gen_helper_stqf(cpu_addr, r_const);
4755 tcg_temp_free_i32(r_const);
4757 break;
4758 #else /* !TARGET_SPARC64 */
4759 /* stdfq, store floating point queue */
4760 #if defined(CONFIG_USER_ONLY)
4761 goto illegal_insn;
4762 #else
4763 if (!supervisor(dc))
4764 goto priv_insn;
4765 if (gen_trap_ifnofpu(dc, cpu_cond))
4766 goto jmp_insn;
4767 goto nfq_insn;
4768 #endif
4769 #endif
4770 case 0x27: /* stdf, store double fpreg */
4772 TCGv_i32 r_const;
4774 gen_op_load_fpr_DT0(DFPREG(rd));
4775 r_const = tcg_const_i32(dc->mem_idx);
4776 gen_helper_stdf(cpu_addr, r_const);
4777 tcg_temp_free_i32(r_const);
4779 break;
4780 default:
4781 goto illegal_insn;
4783 } else if (xop > 0x33 && xop < 0x3f) {
4784 save_state(dc, cpu_cond);
4785 switch (xop) {
4786 #ifdef TARGET_SPARC64
4787 case 0x34: /* V9 stfa */
4788 gen_stf_asi(cpu_addr, insn, 4, rd);
4789 break;
4790 case 0x36: /* V9 stqfa */
4792 TCGv_i32 r_const;
4794 CHECK_FPU_FEATURE(dc, FLOAT128);
4795 r_const = tcg_const_i32(7);
4796 gen_helper_check_align(cpu_addr, r_const);
4797 tcg_temp_free_i32(r_const);
4798 gen_op_load_fpr_QT0(QFPREG(rd));
4799 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4801 break;
4802 case 0x37: /* V9 stdfa */
4803 gen_op_load_fpr_DT0(DFPREG(rd));
4804 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4805 break;
4806 case 0x3c: /* V9 casa */
4807 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4808 gen_movl_TN_reg(rd, cpu_val);
4809 break;
4810 case 0x3e: /* V9 casxa */
4811 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4812 gen_movl_TN_reg(rd, cpu_val);
4813 break;
4814 #else
4815 case 0x34: /* stc */
4816 case 0x35: /* stcsr */
4817 case 0x36: /* stdcq */
4818 case 0x37: /* stdc */
4819 goto ncp_insn;
4820 #endif
4821 default:
4822 goto illegal_insn;
4824 } else
4825 goto illegal_insn;
4827 break;
4829 /* default case for non jump instructions */
4830 if (dc->npc == DYNAMIC_PC) {
4831 dc->pc = DYNAMIC_PC;
4832 gen_op_next_insn();
4833 } else if (dc->npc == JUMP_PC) {
4834 /* we can do a static jump */
4835 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4836 dc->is_br = 1;
4837 } else {
4838 dc->pc = dc->npc;
4839 dc->npc = dc->npc + 4;
4841 jmp_insn:
4842 return;
4843 illegal_insn:
4845 TCGv_i32 r_const;
4847 save_state(dc, cpu_cond);
4848 r_const = tcg_const_i32(TT_ILL_INSN);
4849 gen_helper_raise_exception(r_const);
4850 tcg_temp_free_i32(r_const);
4851 dc->is_br = 1;
4853 return;
4854 unimp_flush:
4856 TCGv_i32 r_const;
4858 save_state(dc, cpu_cond);
4859 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4860 gen_helper_raise_exception(r_const);
4861 tcg_temp_free_i32(r_const);
4862 dc->is_br = 1;
4864 return;
4865 #if !defined(CONFIG_USER_ONLY)
4866 priv_insn:
4868 TCGv_i32 r_const;
4870 save_state(dc, cpu_cond);
4871 r_const = tcg_const_i32(TT_PRIV_INSN);
4872 gen_helper_raise_exception(r_const);
4873 tcg_temp_free_i32(r_const);
4874 dc->is_br = 1;
4876 return;
4877 #endif
4878 nfpu_insn:
4879 save_state(dc, cpu_cond);
4880 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4881 dc->is_br = 1;
4882 return;
4883 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4884 nfq_insn:
4885 save_state(dc, cpu_cond);
4886 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4887 dc->is_br = 1;
4888 return;
4889 #endif
4890 #ifndef TARGET_SPARC64
4891 ncp_insn:
4893 TCGv r_const;
4895 save_state(dc, cpu_cond);
4896 r_const = tcg_const_i32(TT_NCP_INSN);
4897 gen_helper_raise_exception(r_const);
4898 tcg_temp_free(r_const);
4899 dc->is_br = 1;
4901 return;
4902 #endif
4905 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4906 int spc, CPUSPARCState *env)
4908 target_ulong pc_start, last_pc;
4909 uint16_t *gen_opc_end;
4910 DisasContext dc1, *dc = &dc1;
4911 CPUBreakpoint *bp;
4912 int j, lj = -1;
4913 int num_insns;
4914 int max_insns;
4916 memset(dc, 0, sizeof(DisasContext));
4917 dc->tb = tb;
4918 pc_start = tb->pc;
4919 dc->pc = pc_start;
4920 last_pc = dc->pc;
4921 dc->npc = (target_ulong) tb->cs_base;
4922 dc->mem_idx = cpu_mmu_index(env);
4923 dc->def = env->def;
4924 if ((dc->def->features & CPU_FEATURE_FLOAT))
4925 dc->fpu_enabled = cpu_fpu_enabled(env);
4926 else
4927 dc->fpu_enabled = 0;
4928 #ifdef TARGET_SPARC64
4929 dc->address_mask_32bit = env->pstate & PS_AM;
4930 #endif
4931 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4933 cpu_tmp0 = tcg_temp_new();
4934 cpu_tmp32 = tcg_temp_new_i32();
4935 cpu_tmp64 = tcg_temp_new_i64();
4937 cpu_dst = tcg_temp_local_new();
4939 // loads and stores
4940 cpu_val = tcg_temp_local_new();
4941 cpu_addr = tcg_temp_local_new();
4943 num_insns = 0;
4944 max_insns = tb->cflags & CF_COUNT_MASK;
4945 if (max_insns == 0)
4946 max_insns = CF_COUNT_MASK;
4947 gen_icount_start();
4948 do {
4949 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4950 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4951 if (bp->pc == dc->pc) {
4952 if (dc->pc != pc_start)
4953 save_state(dc, cpu_cond);
4954 gen_helper_debug();
4955 tcg_gen_exit_tb(0);
4956 dc->is_br = 1;
4957 goto exit_gen_loop;
4961 if (spc) {
4962 qemu_log("Search PC...\n");
4963 j = gen_opc_ptr - gen_opc_buf;
4964 if (lj < j) {
4965 lj++;
4966 while (lj < j)
4967 gen_opc_instr_start[lj++] = 0;
4968 gen_opc_pc[lj] = dc->pc;
4969 gen_opc_npc[lj] = dc->npc;
4970 gen_opc_instr_start[lj] = 1;
4971 gen_opc_icount[lj] = num_insns;
4974 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4975 gen_io_start();
4976 last_pc = dc->pc;
4977 disas_sparc_insn(dc);
4978 num_insns++;
4980 if (dc->is_br)
4981 break;
4982 /* if the next PC is different, we abort now */
4983 if (dc->pc != (last_pc + 4))
4984 break;
4985 /* if we reach a page boundary, we stop generation so that the
4986 PC of a TT_TFAULT exception is always in the right page */
4987 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4988 break;
4989 /* if single step mode, we generate only one instruction and
4990 generate an exception */
4991 if (env->singlestep_enabled || singlestep) {
4992 tcg_gen_movi_tl(cpu_pc, dc->pc);
4993 tcg_gen_exit_tb(0);
4994 break;
4996 } while ((gen_opc_ptr < gen_opc_end) &&
4997 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4998 num_insns < max_insns);
5000 exit_gen_loop:
5001 tcg_temp_free(cpu_addr);
5002 tcg_temp_free(cpu_val);
5003 tcg_temp_free(cpu_dst);
5004 tcg_temp_free_i64(cpu_tmp64);
5005 tcg_temp_free_i32(cpu_tmp32);
5006 tcg_temp_free(cpu_tmp0);
5007 if (tb->cflags & CF_LAST_IO)
5008 gen_io_end();
5009 if (!dc->is_br) {
5010 if (dc->pc != DYNAMIC_PC &&
5011 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5012 /* static PC and NPC: we can use direct chaining */
5013 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5014 } else {
5015 if (dc->pc != DYNAMIC_PC)
5016 tcg_gen_movi_tl(cpu_pc, dc->pc);
5017 save_npc(dc, cpu_cond);
5018 tcg_gen_exit_tb(0);
5021 gen_icount_end(tb, num_insns);
5022 *gen_opc_ptr = INDEX_op_end;
5023 if (spc) {
5024 j = gen_opc_ptr - gen_opc_buf;
5025 lj++;
5026 while (lj <= j)
5027 gen_opc_instr_start[lj++] = 0;
5028 #if 0
5029 log_page_dump();
5030 #endif
5031 gen_opc_jump_pc[0] = dc->jump_pc[0];
5032 gen_opc_jump_pc[1] = dc->jump_pc[1];
5033 } else {
5034 tb->size = last_pc + 4 - pc_start;
5035 tb->icount = num_insns;
5037 #ifdef DEBUG_DISAS
5038 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5039 qemu_log("--------------\n");
5040 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5041 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5042 qemu_log("\n");
5044 #endif
5047 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5049 gen_intermediate_code_internal(tb, 0, env);
5052 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5054 gen_intermediate_code_internal(tb, 1, env);
/*
 * One-time registration of the fixed TCG globals that mirror CPUState
 * fields (condition codes, PSR/FSR, PC/NPC, general and FP registers,
 * and the SPARC64-only control registers).  Guarded by the static
 * 'inited' flag so repeated calls (one per created CPU) are no-ops
 * after the first.
 */
5057 void gen_intermediate_code_init(CPUSPARCState *env)
5059 unsigned int i;
5060 static int inited;
/* Debug names for the global registers; g0 has no TCG global because
   it is hardwired to zero (note cpu_gregs indexing starts at 1 below). */
5061 static const char * const gregnames[8] = {
5062 NULL, // g0 not used
5063 "g1",
5064 "g2",
5065 "g3",
5066 "g4",
5067 "g5",
5068 "g6",
5069 "g7",
5071 static const char * const fregnames[64] = {
5072 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5073 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5074 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5075 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5076 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5077 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5078 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5079 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5082 /* init various static tables */
5083 if (!inited) {
5084 inited = 1;
/* env lives in a fixed host register (TCG_AREG0); everything else is
   a memory-backed global at a CPUState offset. */
5086 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5087 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5088 offsetof(CPUState, regwptr),
5089 "regwptr");
5090 #ifdef TARGET_SPARC64
/* SPARC V9-only state: extended condition codes, ASI, FPRS, GSR and
   the tick/hypervisor control registers. */
5091 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5092 "xcc");
5093 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5094 "asi");
5095 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5096 "fprs");
5097 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5098 "gsr");
5099 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5100 offsetof(CPUState, tick_cmpr),
5101 "tick_cmpr");
5102 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5103 offsetof(CPUState, stick_cmpr),
5104 "stick_cmpr");
5105 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5106 offsetof(CPUState, hstick_cmpr),
5107 "hstick_cmpr");
5108 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5109 "hintp");
5110 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5111 "htba");
5112 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5113 "hver");
5114 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5115 offsetof(CPUState, ssr), "ssr");
5116 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5117 offsetof(CPUState, version), "ver");
5118 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5119 offsetof(CPUState, softint),
5120 "softint");
5121 #else
/* SPARC V8 only: the window invalid mask register. */
5122 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5123 "wim");
5124 #endif
/* State common to V8 and V9. */
5125 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5126 "cond");
5127 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5128 "cc_src");
5129 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5130 offsetof(CPUState, cc_src2),
5131 "cc_src2");
5132 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5133 "cc_dst");
5134 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5135 "psr");
5136 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5137 "fsr");
5138 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5139 "pc");
5140 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5141 "npc");
5142 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5143 #ifndef CONFIG_USER_ONLY
5144 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5145 "tbr");
5146 #endif
/* Register globals g1..g7 (g0 is constant zero) and all FP registers. */
5147 for (i = 1; i < 8; i++)
5148 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5149 offsetof(CPUState, gregs[i]),
5150 gregnames[i]);
5151 for (i = 0; i < TARGET_FPREGS; i++)
5152 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5153 offsetof(CPUState, fpr[i]),
5154 fregnames[i]);
5156 /* register helpers */
/* GEN_HELPER == 2 makes helper.h expand into helper registrations. */
5158 #define GEN_HELPER 2
5159 #include "helper.h"
5163 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5164 unsigned long searched_pc, int pc_pos, void *puc)
5166 target_ulong npc;
5167 env->pc = gen_opc_pc[pc_pos];
5168 npc = gen_opc_npc[pc_pos];
5169 if (npc == 1) {
5170 /* dynamic NPC: already stored */
5171 } else if (npc == 2) {
5172 target_ulong t2 = (target_ulong)(unsigned long)puc;
5173 /* jump PC: use T2 and the jump targets of the translation */
5174 if (t2)
5175 env->npc = gen_opc_jump_pc[0];
5176 else
5177 env->npc = gen_opc_jump_pc[1];
5178 } else {
5179 env->npc = npc;