Convert basic float32 ops to TCG
[qemu/mini2440.git] / target-sparc / translate.c
blob937c7084c23d36ff6b36853d60ed44396b1d8028
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define DEBUG_DISAS
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
61 #include "gen-icount.h"
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
67 int is_br;
68 int mem_idx;
69 int fpu_enabled;
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
72 sparc_def_t *def;
73 } DisasContext;
// This macro uses non-native bit order (bit 0 is the MSB)
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two field extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map instruction FP register numbers to indexes into cpu_fpr[].
   On SPARC64 bit 0 of a double/quad register number selects the
   upper bank (registers 32..62), per the V9 register encoding. */
#ifdef TARGET_SPARC64
#define FFPREG(r) (r)
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define FFPREG(r) (r)
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
/* Sign-extend the low LEN bits of X to a full signed 32-bit value.
 *
 * The left shift is done on an unsigned value: left-shifting a signed
 * int into (or out of) the sign bit is undefined behaviour in C.  The
 * final arithmetic right shift of a possibly-negative int is
 * implementation-defined, but QEMU relies on the universal
 * sign-replicating behaviour throughout, as the original did.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
/* Instruction bit 13 set => second operand is a signed immediate. */
#define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_FT0(unsigned int src)
107 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, ft0));
110 static void gen_op_load_fpr_FT1(unsigned int src)
112 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, ft1));
115 static void gen_op_store_FT0_fpr(unsigned int dst)
117 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, ft0));
120 static void gen_op_load_fpr_DT0(unsigned int src)
122 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
123 offsetof(CPU_DoubleU, l.upper));
124 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
125 offsetof(CPU_DoubleU, l.lower));
128 static void gen_op_load_fpr_DT1(unsigned int src)
130 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
131 offsetof(CPU_DoubleU, l.upper));
132 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
133 offsetof(CPU_DoubleU, l.lower));
136 static void gen_op_store_DT0_fpr(unsigned int dst)
138 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
139 offsetof(CPU_DoubleU, l.upper));
140 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
141 offsetof(CPU_DoubleU, l.lower));
144 static void gen_op_load_fpr_QT0(unsigned int src)
146 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.upmost));
148 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
149 offsetof(CPU_QuadU, l.upper));
150 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
151 offsetof(CPU_QuadU, l.lower));
152 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
153 offsetof(CPU_QuadU, l.lowest));
156 static void gen_op_load_fpr_QT1(unsigned int src)
158 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.upmost));
160 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
161 offsetof(CPU_QuadU, l.upper));
162 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
163 offsetof(CPU_QuadU, l.lower));
164 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
165 offsetof(CPU_QuadU, l.lowest));
168 static void gen_op_store_QT0_fpr(unsigned int dst)
170 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.upmost));
172 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
173 offsetof(CPU_QuadU, l.upper));
174 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
175 offsetof(CPU_QuadU, l.lower));
176 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
177 offsetof(CPU_QuadU, l.lowest));
/* moves */
/* Privilege-level predicates: in user-only emulation everything runs
   unprivileged; otherwise the privilege level is encoded in mem_idx. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

/* Address masking: on SPARC64 the 32-bit ABI always masks, otherwise
   masking depends on the per-TB address_mask_32bit flag (PSTATE.AM). */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
202 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
204 #ifdef TARGET_SPARC64
205 if (AM_CHECK(dc))
206 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
207 #endif
210 static inline void gen_movl_reg_TN(int reg, TCGv tn)
212 if (reg == 0)
213 tcg_gen_movi_tl(tn, 0);
214 else if (reg < 8)
215 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
216 else {
217 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
221 static inline void gen_movl_TN_reg(int reg, TCGv tn)
223 if (reg == 0)
224 return;
225 else if (reg < 8)
226 tcg_gen_mov_tl(cpu_gregs[reg], tn);
227 else {
228 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
232 static inline void gen_goto_tb(DisasContext *s, int tb_num,
233 target_ulong pc, target_ulong npc)
235 TranslationBlock *tb;
237 tb = s->tb;
238 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
239 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num);
242 tcg_gen_movi_tl(cpu_pc, pc);
243 tcg_gen_movi_tl(cpu_npc, npc);
244 tcg_gen_exit_tb((long)tb + tb_num);
245 } else {
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc, pc);
248 tcg_gen_movi_tl(cpu_npc, npc);
249 tcg_gen_exit_tb(0);
253 // XXX suboptimal
254 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
256 tcg_gen_extu_i32_tl(reg, src);
257 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
258 tcg_gen_andi_tl(reg, reg, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
263 tcg_gen_extu_i32_tl(reg, src);
264 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
265 tcg_gen_andi_tl(reg, reg, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
270 tcg_gen_extu_i32_tl(reg, src);
271 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
272 tcg_gen_andi_tl(reg, reg, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
277 tcg_gen_extu_i32_tl(reg, src);
278 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
279 tcg_gen_andi_tl(reg, reg, 0x1);
282 static inline void gen_cc_clear_icc(void)
284 tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
/* Clear all 64-bit condition codes (xcc). */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
294 /* old op:
295 if (!T0)
296 env->psr |= PSR_ZERO;
297 if ((int32_t) T0 < 0)
298 env->psr |= PSR_NEG;
300 static inline void gen_cc_NZ_icc(TCGv dst)
302 TCGv r_temp;
303 int l1, l2;
305 l1 = gen_new_label();
306 l2 = gen_new_label();
307 r_temp = tcg_temp_new(TCG_TYPE_TL);
308 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
309 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
310 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
311 gen_set_label(l1);
312 tcg_gen_ext_i32_tl(r_temp, dst);
313 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
314 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
315 gen_set_label(l2);
316 tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
/* Set the xcc N and Z bits from the full 64-bit DST. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int lbl_nonzero, lbl_nonneg;

    lbl_nonzero = gen_new_label();
    lbl_nonneg = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, lbl_nonzero);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(lbl_nonzero);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, lbl_nonneg);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(lbl_nonneg);
}
#endif
335 /* old op:
336 if (T0 < src1)
337 env->psr |= PSR_CARRY;
339 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
341 TCGv r_temp1, r_temp2;
342 int l1;
344 l1 = gen_new_label();
345 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
346 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
347 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
348 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
349 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
350 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
351 gen_set_label(l1);
352 tcg_temp_free(r_temp1);
353 tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Set the xcc C bit for an addition: 64-bit result below operand. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int lbl_nocarry;

    lbl_nocarry = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, lbl_nocarry);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(lbl_nocarry);
}
#endif
368 /* old op:
369 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
370 env->psr |= PSR_OVF;
372 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
374 TCGv r_temp;
376 r_temp = tcg_temp_new(TCG_TYPE_TL);
377 tcg_gen_xor_tl(r_temp, src1, src2);
378 tcg_gen_xori_tl(r_temp, r_temp, -1);
379 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
380 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
381 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
382 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
383 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
384 tcg_temp_free(r_temp);
385 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
#ifdef TARGET_SPARC64
/* Set the xcc V bit for an addition, testing the 64-bit sign (bit 63). */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv t;

    t = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(t, src1, src2);
    tcg_gen_xori_tl(t, t, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(t, t, cpu_tmp0);
    tcg_gen_andi_tl(t, t, (1ULL << 63));
    tcg_gen_shri_tl(t, t, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, t);
    tcg_temp_free(t);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
406 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
408 TCGv r_temp, r_const;
409 int l1;
411 l1 = gen_new_label();
413 r_temp = tcg_temp_new(TCG_TYPE_TL);
414 tcg_gen_xor_tl(r_temp, src1, src2);
415 tcg_gen_xori_tl(r_temp, r_temp, -1);
416 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
417 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
418 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
419 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
420 r_const = tcg_const_i32(TT_TOVF);
421 tcg_gen_helper_0_1(raise_exception, r_const);
422 tcg_temp_free(r_const);
423 gen_set_label(l1);
424 tcg_temp_free(r_temp);
427 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
429 int l1;
431 l1 = gen_new_label();
432 tcg_gen_or_tl(cpu_tmp0, src1, src2);
433 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
434 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
435 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
436 gen_set_label(l1);
439 static inline void gen_tag_tv(TCGv src1, TCGv src2)
441 int l1;
442 TCGv r_const;
444 l1 = gen_new_label();
445 tcg_gen_or_tl(cpu_tmp0, src1, src2);
446 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
447 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
448 r_const = tcg_const_i32(TT_TOVF);
449 tcg_gen_helper_0_1(raise_exception, r_const);
450 tcg_temp_free(r_const);
451 gen_set_label(l1);
454 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
456 tcg_gen_mov_tl(cpu_cc_src, src1);
457 tcg_gen_mov_tl(cpu_cc_src2, src2);
458 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
459 gen_cc_clear_icc();
460 gen_cc_NZ_icc(cpu_cc_dst);
461 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
462 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463 #ifdef TARGET_SPARC64
464 gen_cc_clear_xcc();
465 gen_cc_NZ_xcc(cpu_cc_dst);
466 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
467 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
468 #endif
469 tcg_gen_mov_tl(dst, cpu_cc_dst);
472 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
474 tcg_gen_mov_tl(cpu_cc_src, src1);
475 tcg_gen_mov_tl(cpu_cc_src2, src2);
476 gen_mov_reg_C(cpu_tmp0, cpu_psr);
477 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
478 gen_cc_clear_icc();
479 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
480 #ifdef TARGET_SPARC64
481 gen_cc_clear_xcc();
482 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
483 #endif
484 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
485 gen_cc_NZ_icc(cpu_cc_dst);
486 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
487 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
488 #ifdef TARGET_SPARC64
489 gen_cc_NZ_xcc(cpu_cc_dst);
490 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
491 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
492 #endif
493 tcg_gen_mov_tl(dst, cpu_cc_dst);
496 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
498 tcg_gen_mov_tl(cpu_cc_src, src1);
499 tcg_gen_mov_tl(cpu_cc_src2, src2);
500 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
501 gen_cc_clear_icc();
502 gen_cc_NZ_icc(cpu_cc_dst);
503 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
504 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
505 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
506 #ifdef TARGET_SPARC64
507 gen_cc_clear_xcc();
508 gen_cc_NZ_xcc(cpu_cc_dst);
509 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
510 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
511 #endif
512 tcg_gen_mov_tl(dst, cpu_cc_dst);
515 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
517 tcg_gen_mov_tl(cpu_cc_src, src1);
518 tcg_gen_mov_tl(cpu_cc_src2, src2);
519 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
520 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
521 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
522 gen_cc_clear_icc();
523 gen_cc_NZ_icc(cpu_cc_dst);
524 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
525 #ifdef TARGET_SPARC64
526 gen_cc_clear_xcc();
527 gen_cc_NZ_xcc(cpu_cc_dst);
528 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
529 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
530 #endif
531 tcg_gen_mov_tl(dst, cpu_cc_dst);
534 /* old op:
535 if (src1 < T1)
536 env->psr |= PSR_CARRY;
538 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
540 TCGv r_temp1, r_temp2;
541 int l1;
543 l1 = gen_new_label();
544 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
545 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
546 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
547 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
548 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
549 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
550 gen_set_label(l1);
551 tcg_temp_free(r_temp1);
552 tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Set the xcc C bit for a subtraction (64-bit borrow). */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int lbl_noborrow;

    lbl_noborrow = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, lbl_noborrow);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(lbl_noborrow);
}
#endif
567 /* old op:
568 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
569 env->psr |= PSR_OVF;
571 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
573 TCGv r_temp;
575 r_temp = tcg_temp_new(TCG_TYPE_TL);
576 tcg_gen_xor_tl(r_temp, src1, src2);
577 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
578 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
579 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
580 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
581 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
582 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
583 tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
/* Set the xcc V bit for a subtraction, testing the 64-bit sign. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv t;

    t = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(t, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(t, t, cpu_tmp0);
    tcg_gen_andi_tl(t, t, (1ULL << 63));
    tcg_gen_shri_tl(t, t, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, t);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(t);
}
#endif
603 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
605 TCGv r_temp, r_const;
606 int l1;
608 l1 = gen_new_label();
610 r_temp = tcg_temp_new(TCG_TYPE_TL);
611 tcg_gen_xor_tl(r_temp, src1, src2);
612 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
613 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
614 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
615 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
616 r_const = tcg_const_i32(TT_TOVF);
617 tcg_gen_helper_0_1(raise_exception, r_const);
618 tcg_temp_free(r_const);
619 gen_set_label(l1);
620 tcg_temp_free(r_temp);
623 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
625 tcg_gen_mov_tl(cpu_cc_src, src1);
626 tcg_gen_mov_tl(cpu_cc_src2, src2);
627 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
628 gen_cc_clear_icc();
629 gen_cc_NZ_icc(cpu_cc_dst);
630 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
631 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
632 #ifdef TARGET_SPARC64
633 gen_cc_clear_xcc();
634 gen_cc_NZ_xcc(cpu_cc_dst);
635 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
636 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
637 #endif
638 tcg_gen_mov_tl(dst, cpu_cc_dst);
641 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
643 tcg_gen_mov_tl(cpu_cc_src, src1);
644 tcg_gen_mov_tl(cpu_cc_src2, src2);
645 gen_mov_reg_C(cpu_tmp0, cpu_psr);
646 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
647 gen_cc_clear_icc();
648 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
649 #ifdef TARGET_SPARC64
650 gen_cc_clear_xcc();
651 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
652 #endif
653 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
654 gen_cc_NZ_icc(cpu_cc_dst);
655 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
656 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 #ifdef TARGET_SPARC64
658 gen_cc_NZ_xcc(cpu_cc_dst);
659 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
660 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
661 #endif
662 tcg_gen_mov_tl(dst, cpu_cc_dst);
665 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
667 tcg_gen_mov_tl(cpu_cc_src, src1);
668 tcg_gen_mov_tl(cpu_cc_src2, src2);
669 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
670 gen_cc_clear_icc();
671 gen_cc_NZ_icc(cpu_cc_dst);
672 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
673 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
674 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
675 #ifdef TARGET_SPARC64
676 gen_cc_clear_xcc();
677 gen_cc_NZ_xcc(cpu_cc_dst);
678 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
679 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
680 #endif
681 tcg_gen_mov_tl(dst, cpu_cc_dst);
684 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
686 tcg_gen_mov_tl(cpu_cc_src, src1);
687 tcg_gen_mov_tl(cpu_cc_src2, src2);
688 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
689 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
690 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
691 gen_cc_clear_icc();
692 gen_cc_NZ_icc(cpu_cc_dst);
693 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
694 #ifdef TARGET_SPARC64
695 gen_cc_clear_xcc();
696 gen_cc_NZ_xcc(cpu_cc_dst);
697 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
698 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
699 #endif
700 tcg_gen_mov_tl(dst, cpu_cc_dst);
703 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
705 TCGv r_temp;
706 int l1;
708 l1 = gen_new_label();
709 r_temp = tcg_temp_new(TCG_TYPE_TL);
711 /* old op:
712 if (!(env->y & 1))
713 T1 = 0;
715 tcg_gen_mov_tl(cpu_cc_src, src1);
716 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
717 tcg_gen_mov_tl(cpu_cc_src2, src2);
718 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
719 tcg_gen_movi_tl(cpu_cc_src2, 0);
720 gen_set_label(l1);
722 // b2 = T0 & 1;
723 // env->y = (b2 << 31) | (env->y >> 1);
724 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
725 tcg_gen_shli_tl(r_temp, r_temp, 31);
726 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
727 tcg_gen_or_tl(cpu_y, cpu_tmp0, r_temp);
729 // b1 = N ^ V;
730 gen_mov_reg_N(cpu_tmp0, cpu_psr);
731 gen_mov_reg_V(r_temp, cpu_psr);
732 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
733 tcg_temp_free(r_temp);
735 // T0 = (b1 << 31) | (T0 >> 1);
736 // src1 = T0;
737 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
738 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
739 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
741 /* do addition and update flags */
742 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
744 gen_cc_clear_icc();
745 gen_cc_NZ_icc(cpu_cc_dst);
746 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
747 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
748 tcg_gen_mov_tl(dst, cpu_cc_dst);
751 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
753 TCGv r_temp, r_temp2;
755 r_temp = tcg_temp_new(TCG_TYPE_I64);
756 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
758 tcg_gen_extu_i32_i64(r_temp, src2);
759 tcg_gen_extu_i32_i64(r_temp2, src1);
760 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
762 tcg_gen_shri_i64(r_temp, r_temp2, 32);
763 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
764 tcg_temp_free(r_temp);
765 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
766 #ifdef TARGET_SPARC64
767 tcg_gen_mov_i64(dst, r_temp2);
768 #else
769 tcg_gen_trunc_i64_tl(dst, r_temp2);
770 #endif
771 tcg_temp_free(r_temp2);
774 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
776 TCGv r_temp, r_temp2;
778 r_temp = tcg_temp_new(TCG_TYPE_I64);
779 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
781 tcg_gen_ext_i32_i64(r_temp, src2);
782 tcg_gen_ext_i32_i64(r_temp2, src1);
783 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
785 tcg_gen_shri_i64(r_temp, r_temp2, 32);
786 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
787 tcg_temp_free(r_temp);
788 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
789 #ifdef TARGET_SPARC64
790 tcg_gen_mov_i64(dst, r_temp2);
791 #else
792 tcg_gen_trunc_i64_tl(dst, r_temp2);
793 #endif
794 tcg_temp_free(r_temp2);
797 #ifdef TARGET_SPARC64
798 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
800 TCGv r_const;
801 int l1;
803 l1 = gen_new_label();
804 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
805 r_const = tcg_const_i32(TT_DIV_ZERO);
806 tcg_gen_helper_0_1(raise_exception, r_const);
807 tcg_temp_free(r_const);
808 gen_set_label(l1);
811 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
813 int l1, l2;
815 l1 = gen_new_label();
816 l2 = gen_new_label();
817 tcg_gen_mov_tl(cpu_cc_src, src1);
818 tcg_gen_mov_tl(cpu_cc_src2, src2);
819 gen_trap_ifdivzero_tl(cpu_cc_src2);
820 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
821 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
822 tcg_gen_movi_i64(dst, INT64_MIN);
823 tcg_gen_br(l2);
824 gen_set_label(l1);
825 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
826 gen_set_label(l2);
828 #endif
830 static inline void gen_op_div_cc(TCGv dst)
832 int l1;
834 tcg_gen_mov_tl(cpu_cc_dst, dst);
835 gen_cc_clear_icc();
836 gen_cc_NZ_icc(cpu_cc_dst);
837 l1 = gen_new_label();
838 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
839 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
840 gen_set_label(l1);
843 static inline void gen_op_logic_cc(TCGv dst)
845 tcg_gen_mov_tl(cpu_cc_dst, dst);
847 gen_cc_clear_icc();
848 gen_cc_NZ_icc(cpu_cc_dst);
849 #ifdef TARGET_SPARC64
850 gen_cc_clear_xcc();
851 gen_cc_NZ_xcc(cpu_cc_dst);
852 #endif
855 // 1
856 static inline void gen_op_eval_ba(TCGv dst)
858 tcg_gen_movi_tl(dst, 1);
861 // Z
862 static inline void gen_op_eval_be(TCGv dst, TCGv src)
864 gen_mov_reg_Z(dst, src);
867 // Z | (N ^ V)
868 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
870 gen_mov_reg_N(cpu_tmp0, src);
871 gen_mov_reg_V(dst, src);
872 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
873 gen_mov_reg_Z(cpu_tmp0, src);
874 tcg_gen_or_tl(dst, dst, cpu_tmp0);
877 // N ^ V
878 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
880 gen_mov_reg_V(cpu_tmp0, src);
881 gen_mov_reg_N(dst, src);
882 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
885 // C | Z
886 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
888 gen_mov_reg_Z(cpu_tmp0, src);
889 gen_mov_reg_C(dst, src);
890 tcg_gen_or_tl(dst, dst, cpu_tmp0);
893 // C
894 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
896 gen_mov_reg_C(dst, src);
899 // V
900 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
902 gen_mov_reg_V(dst, src);
905 // 0
906 static inline void gen_op_eval_bn(TCGv dst)
908 tcg_gen_movi_tl(dst, 0);
911 // N
912 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
914 gen_mov_reg_N(dst, src);
917 // !Z
918 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
920 gen_mov_reg_Z(dst, src);
921 tcg_gen_xori_tl(dst, dst, 0x1);
924 // !(Z | (N ^ V))
925 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
927 gen_mov_reg_N(cpu_tmp0, src);
928 gen_mov_reg_V(dst, src);
929 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
930 gen_mov_reg_Z(cpu_tmp0, src);
931 tcg_gen_or_tl(dst, dst, cpu_tmp0);
932 tcg_gen_xori_tl(dst, dst, 0x1);
935 // !(N ^ V)
936 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
938 gen_mov_reg_V(cpu_tmp0, src);
939 gen_mov_reg_N(dst, src);
940 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
941 tcg_gen_xori_tl(dst, dst, 0x1);
944 // !(C | Z)
945 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
947 gen_mov_reg_Z(cpu_tmp0, src);
948 gen_mov_reg_C(dst, src);
949 tcg_gen_or_tl(dst, dst, cpu_tmp0);
950 tcg_gen_xori_tl(dst, dst, 0x1);
953 // !C
954 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
956 gen_mov_reg_C(dst, src);
957 tcg_gen_xori_tl(dst, dst, 0x1);
960 // !N
961 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
963 gen_mov_reg_N(dst, src);
964 tcg_gen_xori_tl(dst, dst, 0x1);
967 // !V
968 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
970 gen_mov_reg_V(dst, src);
971 tcg_gen_xori_tl(dst, dst, 0x1);
975 FPSR bit field FCC1 | FCC0:
979 3 unordered
981 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
982 unsigned int fcc_offset)
984 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
985 tcg_gen_andi_tl(reg, reg, 0x1);
988 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
989 unsigned int fcc_offset)
991 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
992 tcg_gen_andi_tl(reg, reg, 0x1);
995 // !0: FCC0 | FCC1
996 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
997 unsigned int fcc_offset)
999 gen_mov_reg_FCC0(dst, src, fcc_offset);
1000 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1001 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1004 // 1 or 2: FCC0 ^ FCC1
1005 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1006 unsigned int fcc_offset)
1008 gen_mov_reg_FCC0(dst, src, fcc_offset);
1009 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1010 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1013 // 1 or 3: FCC0
1014 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1020 // 1: FCC0 & !FCC1
1021 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1022 unsigned int fcc_offset)
1024 gen_mov_reg_FCC0(dst, src, fcc_offset);
1025 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1026 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1027 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1030 // 2 or 3: FCC1
1031 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1032 unsigned int fcc_offset)
1034 gen_mov_reg_FCC1(dst, src, fcc_offset);
1037 // 2: !FCC0 & FCC1
1038 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1039 unsigned int fcc_offset)
1041 gen_mov_reg_FCC0(dst, src, fcc_offset);
1042 tcg_gen_xori_tl(dst, dst, 0x1);
1043 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1044 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1047 // 3: FCC0 & FCC1
1048 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1049 unsigned int fcc_offset)
1051 gen_mov_reg_FCC0(dst, src, fcc_offset);
1052 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1053 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1056 // 0: !(FCC0 | FCC1)
1057 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1058 unsigned int fcc_offset)
1060 gen_mov_reg_FCC0(dst, src, fcc_offset);
1061 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1062 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1063 tcg_gen_xori_tl(dst, dst, 0x1);
1066 // 0 or 3: !(FCC0 ^ FCC1)
1067 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1068 unsigned int fcc_offset)
1070 gen_mov_reg_FCC0(dst, src, fcc_offset);
1071 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1072 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1073 tcg_gen_xori_tl(dst, dst, 0x1);
1076 // 0 or 2: !FCC0
1077 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1078 unsigned int fcc_offset)
1080 gen_mov_reg_FCC0(dst, src, fcc_offset);
1081 tcg_gen_xori_tl(dst, dst, 0x1);
1084 // !1: !(FCC0 & !FCC1)
1085 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1086 unsigned int fcc_offset)
1088 gen_mov_reg_FCC0(dst, src, fcc_offset);
1089 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1090 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1091 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1092 tcg_gen_xori_tl(dst, dst, 0x1);
1095 // 0 or 1: !FCC1
1096 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1097 unsigned int fcc_offset)
1099 gen_mov_reg_FCC1(dst, src, fcc_offset);
1100 tcg_gen_xori_tl(dst, dst, 0x1);
1103 // !2: !(!FCC0 & FCC1)
1104 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1105 unsigned int fcc_offset)
1107 gen_mov_reg_FCC0(dst, src, fcc_offset);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1110 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1111 tcg_gen_xori_tl(dst, dst, 0x1);
1114 // !3: !(FCC0 & FCC1)
1115 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1116 unsigned int fcc_offset)
1118 gen_mov_reg_FCC0(dst, src, fcc_offset);
1119 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1120 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1121 tcg_gen_xori_tl(dst, dst, 0x1);
1124 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1125 target_ulong pc2, TCGv r_cond)
1127 int l1;
1129 l1 = gen_new_label();
1131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1133 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1135 gen_set_label(l1);
1136 gen_goto_tb(dc, 1, pc2, pc2 + 4);
/* Branch with annul bit set: when r_cond is true, execute the delay
   slot (pc2) then jump to the target pc1; when false, the delay slot
   is annulled and execution resumes at pc2 + 4. */
1139 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1140 target_ulong pc2, TCGv r_cond)
1142 int l1;
1144 l1 = gen_new_label();
1146 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1148 gen_goto_tb(dc, 0, pc2, pc1);
1150 gen_set_label(l1);
1151 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a pending conditional npc into the cpu_npc global:
   cpu_npc = r_cond ? npc1 : npc2.  Used to flush a JUMP_PC state
   before anything else needs a concrete npc. */
1154 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1155 TCGv r_cond)
1157 int l1, l2;
1159 l1 = gen_new_label();
1160 l2 = gen_new_label();
1162 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1164 tcg_gen_movi_tl(cpu_npc, npc1);
1165 tcg_gen_br(l2);
1167 gen_set_label(l1);
1168 tcg_gen_movi_tl(cpu_npc, npc2);
1169 gen_set_label(l2);
1172 /* call this function before using the condition register as it may
1173    have been set for a jump */
/* Resolves a deferred two-target npc (JUMP_PC) into cpu_npc so that
   'cond' can be reused; afterwards npc is only known dynamically. */
1174 static inline void flush_cond(DisasContext *dc, TCGv cond)
1176 if (dc->npc == JUMP_PC) {
1177 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1178 dc->npc = DYNAMIC_PC;
/* Commit the translator's notion of npc to the cpu_npc global:
   a pending JUMP_PC is resolved via gen_generic_branch, a static npc
   is stored as an immediate, and an already-dynamic npc needs nothing. */
1182 static inline void save_npc(DisasContext *dc, TCGv cond)
1184 if (dc->npc == JUMP_PC) {
1185 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1186 dc->npc = DYNAMIC_PC;
1187 } else if (dc->npc != DYNAMIC_PC) {
1188 tcg_gen_movi_tl(cpu_npc, dc->npc);
/* Write both pc and npc back to the CPU globals — required before any
   helper that can raise an exception so the guest state is consistent. */
1192 static inline void save_state(DisasContext *dc, TCGv cond)
1194 tcg_gen_movi_tl(cpu_pc, dc->pc);
1195 save_npc(dc, cond);
/* Advance pc to the current npc (used by delayed control transfers).
   If npc is conditional (JUMP_PC) or dynamic, the copy must happen at
   run time through the TCG globals; otherwise it is a pure
   translation-time update of dc->pc. */
1198 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1200 if (dc->npc == JUMP_PC) {
1201 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1202 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1203 dc->pc = DYNAMIC_PC;
1204 } else if (dc->npc == DYNAMIC_PC) {
1205 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1206 dc->pc = DYNAMIC_PC;
1207 } else {
1208 dc->pc = dc->npc;
/* Emit the default sequential step: pc = npc; npc += 4. */
1212 static inline void gen_op_next_insn(void)
1214 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1215 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
/* Evaluate an integer condition code (Bicc/BPcc 'cond' field, 0x0-0xf)
   into r_dst (0 or 1).  On SPARC64, cc selects xcc (non-zero) vs icc
   (zero); on 32-bit targets only the PSR's icc exists.  The sixteen
   cases follow the architectural encoding: 0 never, 8 always, and the
   remaining pairs are complements (e.g. 0x1 be / 0x9 bne). */
1218 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1220 TCGv r_src;
1222 #ifdef TARGET_SPARC64
1223 if (cc)
1224 r_src = cpu_xcc;
1225 else
1226 r_src = cpu_psr;
1227 #else
1228 r_src = cpu_psr;
1229 #endif
1230 switch (cond) {
1231 case 0x0:
1232 gen_op_eval_bn(r_dst);
1233 break;
1234 case 0x1:
1235 gen_op_eval_be(r_dst, r_src);
1236 break;
1237 case 0x2:
1238 gen_op_eval_ble(r_dst, r_src);
1239 break;
1240 case 0x3:
1241 gen_op_eval_bl(r_dst, r_src);
1242 break;
1243 case 0x4:
1244 gen_op_eval_bleu(r_dst, r_src);
1245 break;
1246 case 0x5:
1247 gen_op_eval_bcs(r_dst, r_src);
1248 break;
1249 case 0x6:
1250 gen_op_eval_bneg(r_dst, r_src);
1251 break;
1252 case 0x7:
1253 gen_op_eval_bvs(r_dst, r_src);
1254 break;
1255 case 0x8:
1256 gen_op_eval_ba(r_dst);
1257 break;
1258 case 0x9:
1259 gen_op_eval_bne(r_dst, r_src);
1260 break;
1261 case 0xa:
1262 gen_op_eval_bg(r_dst, r_src);
1263 break;
1264 case 0xb:
1265 gen_op_eval_bge(r_dst, r_src);
1266 break;
1267 case 0xc:
1268 gen_op_eval_bgu(r_dst, r_src);
1269 break;
1270 case 0xd:
1271 gen_op_eval_bcc(r_dst, r_src);
1272 break;
1273 case 0xe:
1274 gen_op_eval_bpos(r_dst, r_src);
1275 break;
1276 case 0xf:
1277 gen_op_eval_bvc(r_dst, r_src);
1278 break;
/* Evaluate a floating-point condition (FBfcc/FBPfcc 'cond' field) into
   r_dst.  cc picks one of the four V9 fcc fields in the FSR; 'offset'
   is the bit distance from fcc0's position to the selected field
   (fcc0 lives at bit 10, fcc1/2/3 at bits 32/34/36 — hence the
   "3x - 10" constants).  32-bit targets always use cc == 0. */
1284 unsigned int offset;
1286 switch (cc) {
1287 default:
1288 case 0x0:
1289 offset = 0;
1290 break;
1291 case 0x1:
1292 offset = 32 - 10;
1293 break;
1294 case 0x2:
1295 offset = 34 - 10;
1296 break;
1297 case 0x3:
1298 offset = 36 - 10;
1299 break;
1302 switch (cond) {
1303 case 0x0:
1304 gen_op_eval_bn(r_dst);
1305 break;
1306 case 0x1:
1307 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1308 break;
1309 case 0x2:
1310 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1311 break;
1312 case 0x3:
1313 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1314 break;
1315 case 0x4:
1316 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1317 break;
1318 case 0x5:
1319 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1320 break;
1321 case 0x6:
1322 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1323 break;
1324 case 0x7:
1325 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1326 break;
1327 case 0x8:
1328 gen_op_eval_ba(r_dst);
1329 break;
1330 case 0x9:
1331 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1332 break;
1333 case 0xa:
1334 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1335 break;
1336 case 0xb:
1337 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1338 break;
1339 case 0xc:
1340 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1341 break;
1342 case 0xd:
1343 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1344 break;
1345 case 0xe:
1346 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1347 break;
1348 case 0xf:
1349 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1350 break;
1354 #ifdef TARGET_SPARC64
1355 // Inverted logic
/* Map a BPr 'rcond' encoding to the TCG comparison for the OPPOSITE
   outcome — gen_cond_reg branches past the "taken" store when the
   inverted condition holds.
   NOTE(review): the array is declared [8] but only six initializers
   are visible here; two entries (indices for the reserved rcond
   values) appear to have been lost in extraction — verify against the
   original file. */
1356 static const int gen_tcg_cond_reg[8] = {
1358 TCG_COND_NE,
1359 TCG_COND_GT,
1360 TCG_COND_GE,
1362 TCG_COND_EQ,
1363 TCG_COND_LE,
1364 TCG_COND_LT,
/* r_dst = (r_src <cond> 0) ? 1 : 0, using the inverted-compare table
   above to skip the "movi 1" when the condition fails. */
1367 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1369 int l1;
1371 l1 = gen_new_label();
1372 tcg_gen_movi_tl(r_dst, 0);
1373 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1374 tcg_gen_movi_tl(r_dst, 1);
1375 gen_set_label(l1);
1377 #endif
1379 /* XXX: potentially incorrect if dynamic npc */
/* Translate a Bicc/BPcc.  'offset' is the sign-extended, shifted
   displacement; 'a' is the annul bit.  cond 0x0 (bn) and 0x8 (ba) are
   resolved at translation time; any other condition leaves a deferred
   two-target npc (JUMP_PC) for the non-annulled case, or emits a
   gen_branch_a and ends the TB for the annulled case. */
1380 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1381 TCGv r_cond)
1383 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1384 target_ulong target = dc->pc + offset;
1386 if (cond == 0x0) {
1387 /* unconditional not taken */
1388 if (a) {
1389 dc->pc = dc->npc + 4;
1390 dc->npc = dc->pc + 4;
1391 } else {
1392 dc->pc = dc->npc;
1393 dc->npc = dc->pc + 4;
1395 } else if (cond == 0x8) {
1396 /* unconditional taken */
1397 if (a) {
1398 dc->pc = target;
1399 dc->npc = dc->pc + 4;
1400 } else {
1401 dc->pc = dc->npc;
1402 dc->npc = target;
1404 } else {
1405 flush_cond(dc, r_cond);
1406 gen_cond(r_cond, cc, cond);
1407 if (a) {
1408 gen_branch_a(dc, target, dc->npc, r_cond);
1409 dc->is_br = 1;
1410 } else {
1411 dc->pc = dc->npc;
1412 dc->jump_pc[0] = target;
1413 dc->jump_pc[1] = dc->npc + 4;
1414 dc->npc = JUMP_PC;
1419 /* XXX: potentially incorrect if dynamic npc */
/* Translate an FBfcc/FBPfcc — identical structure to do_branch, but
   the condition is computed from the FSR's fcc field via gen_fcond. */
1420 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1421 TCGv r_cond)
1423 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1424 target_ulong target = dc->pc + offset;
1426 if (cond == 0x0) {
1427 /* unconditional not taken */
1428 if (a) {
1429 dc->pc = dc->npc + 4;
1430 dc->npc = dc->pc + 4;
1431 } else {
1432 dc->pc = dc->npc;
1433 dc->npc = dc->pc + 4;
1435 } else if (cond == 0x8) {
1436 /* unconditional taken */
1437 if (a) {
1438 dc->pc = target;
1439 dc->npc = dc->pc + 4;
1440 } else {
1441 dc->pc = dc->npc;
1442 dc->npc = target;
1444 } else {
1445 flush_cond(dc, r_cond);
1446 gen_fcond(r_cond, cc, cond);
1447 if (a) {
1448 gen_branch_a(dc, target, dc->npc, r_cond);
1449 dc->is_br = 1;
1450 } else {
1451 dc->pc = dc->npc;
1452 dc->jump_pc[0] = target;
1453 dc->jump_pc[1] = dc->npc + 4;
1454 dc->npc = JUMP_PC;
1459 #ifdef TARGET_SPARC64
1460 /* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 BPr (branch on register contents).  Unlike
   do_branch there is no never/always short-circuit: the condition is
   always evaluated from r_reg via gen_cond_reg. */
1461 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1462 TCGv r_cond, TCGv r_reg)
1464 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1465 target_ulong target = dc->pc + offset;
1467 flush_cond(dc, r_cond);
1468 gen_cond_reg(r_cond, cond, r_reg);
1469 if (a) {
1470 gen_branch_a(dc, target, dc->npc, r_cond);
1471 dc->is_br = 1;
1472 } else {
1473 dc->pc = dc->npc;
1474 dc->jump_pc[0] = target;
1475 dc->jump_pc[1] = dc->npc + 4;
1476 dc->npc = JUMP_PC;
/* Per-fcc dispatch tables for the double/quad FP compare helpers
   (SPARC64 only — still inside the #ifdef TARGET_SPARC64 above).
   Index is the target fcc field (0-3); the base helper writes fcc0. */
1480 static GenOpFunc * const gen_fcmpd[4] = {
1481 helper_fcmpd,
1482 helper_fcmpd_fcc1,
1483 helper_fcmpd_fcc2,
1484 helper_fcmpd_fcc3,
1487 static GenOpFunc * const gen_fcmpq[4] = {
1488 helper_fcmpq,
1489 helper_fcmpq_fcc1,
1490 helper_fcmpq_fcc2,
1491 helper_fcmpq_fcc3,
/* "e" variants signal an IEEE invalid exception on unordered operands. */
1494 static GenOpFunc * const gen_fcmped[4] = {
1495 helper_fcmped,
1496 helper_fcmped_fcc1,
1497 helper_fcmped_fcc2,
1498 helper_fcmped_fcc3,
1501 static GenOpFunc * const gen_fcmpeq[4] = {
1502 helper_fcmpeq,
1503 helper_fcmpeq_fcc1,
1504 helper_fcmpeq_fcc2,
1505 helper_fcmpeq_fcc3,
/* SPARC64 FP compare emitters: fccno selects which FSR fcc field the
   helper updates.  Single-precision operands are passed as TCG values;
   double/quad helpers take no arguments (operands are staged in the
   DT0/DT1 or QT0/QT1 env slots by the caller). */
1510 switch (fccno) {
1511 case 0:
1512 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1513 break;
1514 case 1:
1515 tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
1516 break;
1517 case 2:
1518 tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
1519 break;
1520 case 3:
1521 tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
1522 break;
1526 static inline void gen_op_fcmpd(int fccno)
1528 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
1531 static inline void gen_op_fcmpq(int fccno)
1533 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
/* fcmpes: like fcmps but raises invalid on unordered comparison. */
1536 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1538 switch (fccno) {
1539 case 0:
1540 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1541 break;
1542 case 1:
1543 tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
1544 break;
1545 case 2:
1546 tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
1547 break;
1548 case 3:
1549 tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
1550 break;
1554 static inline void gen_op_fcmped(int fccno)
1556 tcg_gen_helper_0_0(gen_fcmped[fccno]);
1559 static inline void gen_op_fcmpeq(int fccno)
1561 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
1564 #else
/* 32-bit (pre-V9) variants: only one fcc field exists, so fccno is
   accepted for interface symmetry with the SPARC64 versions above but
   ignored. */
1566 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1568 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1571 static inline void gen_op_fcmpd(int fccno)
1573 tcg_gen_helper_0_0(helper_fcmpd);
1576 static inline void gen_op_fcmpq(int fccno)
1578 tcg_gen_helper_0_0(helper_fcmpq);
1581 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1583 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1586 static inline void gen_op_fcmped(int fccno)
1588 tcg_gen_helper_0_0(helper_fcmped);
1591 static inline void gen_op_fcmpeq(int fccno)
1593 tcg_gen_helper_0_0(helper_fcmpeq);
1595 #endif
/* Raise a TT_FP_EXCP trap with the given FTT code: clear the FSR's
   trap-type field, OR in fsr_flags, then call the raise_exception
   helper (which does not return to generated code). */
1597 static inline void gen_op_fpexception_im(int fsr_flags)
1599 TCGv r_const;
1601 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1602 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1603 r_const = tcg_const_i32(TT_FP_EXCP);
1604 tcg_gen_helper_0_1(raise_exception, r_const);
1605 tcg_temp_free(r_const);
/* If the FPU is disabled (system emulation only), emit a TT_NFPU_INSN
   trap and end the TB.  Returns 1 when the trap was emitted — callers
   must then skip translating the instruction.  In user mode the FPU
   is always available and this is a no-op returning 0. */
1610 #if !defined(CONFIG_USER_ONLY)
1611 if (!dc->fpu_enabled) {
1612 TCGv r_const;
1614 save_state(dc, r_cond);
1615 r_const = tcg_const_i32(TT_NFPU_INSN);
1616 tcg_gen_helper_0_1(raise_exception, r_const);
1617 tcg_temp_free(r_const);
1618 dc->is_br = 1;
1619 return 1;
1621 #endif
1622 return 0;
/* Clear the FSR's trap-type and current-exception (cexc) bits before
   executing an FP operation. */
1625 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1627 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
/* Reset the softfloat accumulated exception flags via helper. */
1630 static inline void gen_clear_float_exceptions(void)
1632 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1635 /* asi moves */
1636 #ifdef TARGET_SPARC64
/* Return a TCG i32 holding the ASI for this access: the %asi register
   when the immediate form is used, otherwise the 8-bit ASI encoded in
   the instruction.  Caller owns (and must free) the returned temp. */
1637 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1639 int asi;
1640 TCGv r_asi;
1642 if (IS_IMM) {
1643 r_asi = tcg_temp_new(TCG_TYPE_I32);
1644 tcg_gen_mov_i32(r_asi, cpu_asi);
1645 } else {
1646 asi = GET_FIELD(insn, 19, 26);
1647 r_asi = tcg_const_i32(asi);
1649 return r_asi;
/* SPARC64 alternate-space access emitters.  Each resolves the ASI via
   gen_get_asi, packages size/sign/register-number operands as i32
   constants, and defers the actual access to the corresponding
   run-time helper (helper_ld_asi / helper_st_asi / ...).  All
   constant temps are freed after the helper call is emitted. */
1652 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1653 int sign)
1655 TCGv r_asi, r_size, r_sign;
1657 r_asi = gen_get_asi(insn, addr);
1658 r_size = tcg_const_i32(size);
1659 r_sign = tcg_const_i32(sign);
1660 tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
1661 tcg_temp_free(r_sign);
1662 tcg_temp_free(r_size);
1663 tcg_temp_free(r_asi);
1666 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1668 TCGv r_asi, r_size;
1670 r_asi = gen_get_asi(insn, addr);
1671 r_size = tcg_const_i32(size);
1672 tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
1673 tcg_temp_free(r_size);
1674 tcg_temp_free(r_asi);
/* FP load/store alternate: rd selects the FP destination/source reg
   inside the helper. */
1677 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1679 TCGv r_asi, r_size, r_rd;
1681 r_asi = gen_get_asi(insn, addr);
1682 r_size = tcg_const_i32(size);
1683 r_rd = tcg_const_i32(rd);
1684 tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
1685 tcg_temp_free(r_rd);
1686 tcg_temp_free(r_size);
1687 tcg_temp_free(r_asi);
1690 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1692 TCGv r_asi, r_size, r_rd;
1694 r_asi = gen_get_asi(insn, addr);
1695 r_size = tcg_const_i32(size);
1696 r_rd = tcg_const_i32(rd);
1697 tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
1698 tcg_temp_free(r_rd);
1699 tcg_temp_free(r_size);
1700 tcg_temp_free(r_asi);
/* swapa: load old 32-bit value, store dst, return old value in dst.
   NOTE(review): load and store are two helper calls — not atomic with
   respect to other vCPUs. */
1703 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1705 TCGv r_asi, r_size, r_sign;
1707 r_asi = gen_get_asi(insn, addr);
1708 r_size = tcg_const_i32(4);
1709 r_sign = tcg_const_i32(0);
1710 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1711 tcg_temp_free(r_sign);
1712 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1713 tcg_temp_free(r_size);
1714 tcg_temp_free(r_asi);
1715 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1718 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1720 TCGv r_asi, r_rd;
1722 r_asi = gen_get_asi(insn, addr);
1723 r_rd = tcg_const_i32(rd);
1724 tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
1725 tcg_temp_free(r_rd);
1726 tcg_temp_free(r_asi);
/* stda: pack rd (hi) and rd+1 (lo) into one 64-bit value and store it. */
1729 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1731 TCGv r_temp, r_asi, r_size;
1733 r_temp = tcg_temp_new(TCG_TYPE_TL);
1734 gen_movl_reg_TN(rd + 1, r_temp);
1735 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
1736 r_temp);
1737 tcg_temp_free(r_temp);
1738 r_asi = gen_get_asi(insn, addr);
1739 r_size = tcg_const_i32(8);
1740 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1741 tcg_temp_free(r_size);
1742 tcg_temp_free(r_asi);
/* casa/casxa: compare-and-swap on 32/64-bit values; the comparison
   value comes from register rd, the swap value from val2. */
1745 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1746 int rd)
1748 TCGv r_val1, r_asi;
1750 r_val1 = tcg_temp_new(TCG_TYPE_TL);
1751 gen_movl_reg_TN(rd, r_val1);
1752 r_asi = gen_get_asi(insn, addr);
1753 tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
1754 tcg_temp_free(r_asi);
1755 tcg_temp_free(r_val1);
1758 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1759 int rd)
1761 TCGv r_asi;
1763 gen_movl_reg_TN(rd, cpu_tmp64);
1764 r_asi = gen_get_asi(insn, addr);
1765 tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
1766 tcg_temp_free(r_asi);
1769 #elif !defined(CONFIG_USER_ONLY)
/* 32-bit system-mode ASI access emitters.  The ASI is always taken
   from the instruction's immediate field; helpers operate on 64-bit
   values, so results are truncated to target-long width afterwards. */
1771 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1772 int sign)
1774 TCGv r_asi, r_size, r_sign;
1776 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1777 r_size = tcg_const_i32(size);
1778 r_sign = tcg_const_i32(sign);
1779 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1780 tcg_temp_free(r_sign);
1781 tcg_temp_free(r_size);
1782 tcg_temp_free(r_asi);
1783 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1786 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1788 TCGv r_asi, r_size;
1790 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1791 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1792 r_size = tcg_const_i32(size);
1793 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1794 tcg_temp_free(r_size);
1795 tcg_temp_free(r_asi);
/* swapa (system mode): load old value, store dst, return old value.
   NOTE(review): not atomic — two separate helper calls. */
1798 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1800 TCGv r_asi, r_size, r_sign;
1802 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1803 r_size = tcg_const_i32(4);
1804 r_sign = tcg_const_i32(0);
1805 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1806 tcg_temp_free(r_sign);
1807 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1808 tcg_temp_free(r_size);
1809 tcg_temp_free(r_asi);
1810 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
/* ldda: one 64-bit helper load, then split: low word -> rd+1,
   high word -> rd (and into 'hi'). */
1813 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1815 TCGv r_asi, r_size, r_sign;
1817 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1818 r_size = tcg_const_i32(8);
1819 r_sign = tcg_const_i32(0);
1820 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1821 tcg_temp_free(r_sign);
1822 tcg_temp_free(r_size);
1823 tcg_temp_free(r_asi);
1824 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1825 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1826 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1827 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1828 gen_movl_TN_reg(rd, hi);
/* stda: pack rd (hi) and rd+1 (lo) into 64 bits and store as one unit. */
1831 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1833 TCGv r_temp, r_asi, r_size;
1835 r_temp = tcg_temp_new(TCG_TYPE_TL);
1836 gen_movl_reg_TN(rd + 1, r_temp);
1837 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
1838 tcg_temp_free(r_temp);
1839 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1840 r_size = tcg_const_i32(8);
1841 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1842 tcg_temp_free(r_size);
1843 tcg_temp_free(r_asi);
1845 #endif
1847 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* ldstuba: load a byte from the alternate space into dst, then store
   0xff to the same location.  NOTE(review): emitted as separate load
   and store helper calls, so not atomic across vCPUs. */
1848 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1850 TCGv r_val, r_asi, r_size;
1852 gen_ld_asi(dst, addr, insn, 1, 0);
1854 r_val = tcg_const_i64(0xffULL);
1855 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1856 r_size = tcg_const_i32(1);
1857 tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
1858 tcg_temp_free(r_size);
1859 tcg_temp_free(r_asi);
1860 tcg_temp_free(r_val);
1862 #endif
/* Fetch operand rs1: %g0 yields a constant 0 temp (leaked — see XXX),
   %g1-%g7 alias the TCG globals directly, and window registers are
   loaded from the register-window pointer into the caller-supplied
   scratch 'def'.  Returns the TCGv actually holding the value. */
1864 static inline TCGv get_src1(unsigned int insn, TCGv def)
1866 TCGv r_rs1 = def;
1867 unsigned int rs1;
1869 rs1 = GET_FIELD(insn, 13, 17);
1870 if (rs1 == 0)
1871 r_rs1 = tcg_const_tl(0); // XXX how to free?
1872 else if (rs1 < 8)
1873 r_rs1 = cpu_gregs[rs1];
1874 else
1875 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1876 return r_rs1;
/* Fetch the second operand: a sign-extended 13-bit immediate when the
   i bit is set, otherwise register rs2 with the same %g0 / global /
   window-register handling as get_src1. */
1879 static inline TCGv get_src2(unsigned int insn, TCGv def)
1881 TCGv r_rs2 = def;
1882 unsigned int rs2;
1884 if (IS_IMM) { /* immediate */
1885 rs2 = GET_FIELDs(insn, 19, 31);
1886 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1887 } else { /* register */
1888 rs2 = GET_FIELD(insn, 27, 31);
1889 if (rs2 == 0)
1890 r_rs2 = tcg_const_tl(0); // XXX how to free?
1891 else if (rs2 < 8)
1892 r_rs2 = cpu_gregs[rs2];
1893 else
1894 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1896 return r_rs2;
/* Feature gates used inside disas_sparc_insn: if the CPU model lacks
   the feature, jump to the function's illegal_insn / nfpu_insn labels
   (defined in disas_sparc_insn, so these macros are only valid there). */
1899 #define CHECK_IU_FEATURE(dc, FEATURE) \
1900 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1901 goto illegal_insn;
1902 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1903 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1904 goto nfpu_insn;
1906 /* before an instruction, dc->pc must be static */
1907 static void disas_sparc_insn(DisasContext * dc)
1909 unsigned int insn, opc, rs1, rs2, rd;
1911 if (unlikely(loglevel & CPU_LOG_TB_OP))
1912 tcg_gen_debug_insn_start(dc->pc);
1913 insn = ldl_code(dc->pc);
1914 opc = GET_FIELD(insn, 0, 1);
1916 rd = GET_FIELD(insn, 2, 6);
1918 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1919 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1921 switch (opc) {
1922 case 0: /* branches/sethi */
1924 unsigned int xop = GET_FIELD(insn, 7, 9);
1925 int32_t target;
1926 switch (xop) {
1927 #ifdef TARGET_SPARC64
1928 case 0x1: /* V9 BPcc */
1930 int cc;
1932 target = GET_FIELD_SP(insn, 0, 18);
1933 target = sign_extend(target, 18);
1934 target <<= 2;
1935 cc = GET_FIELD_SP(insn, 20, 21);
1936 if (cc == 0)
1937 do_branch(dc, target, insn, 0, cpu_cond);
1938 else if (cc == 2)
1939 do_branch(dc, target, insn, 1, cpu_cond);
1940 else
1941 goto illegal_insn;
1942 goto jmp_insn;
1944 case 0x3: /* V9 BPr */
1946 target = GET_FIELD_SP(insn, 0, 13) |
1947 (GET_FIELD_SP(insn, 20, 21) << 14);
1948 target = sign_extend(target, 16);
1949 target <<= 2;
1950 cpu_src1 = get_src1(insn, cpu_src1);
1951 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1952 goto jmp_insn;
1954 case 0x5: /* V9 FBPcc */
1956 int cc = GET_FIELD_SP(insn, 20, 21);
1957 if (gen_trap_ifnofpu(dc, cpu_cond))
1958 goto jmp_insn;
1959 target = GET_FIELD_SP(insn, 0, 18);
1960 target = sign_extend(target, 19);
1961 target <<= 2;
1962 do_fbranch(dc, target, insn, cc, cpu_cond);
1963 goto jmp_insn;
1965 #else
1966 case 0x7: /* CBN+x */
1968 goto ncp_insn;
1970 #endif
1971 case 0x2: /* BN+x */
1973 target = GET_FIELD(insn, 10, 31);
1974 target = sign_extend(target, 22);
1975 target <<= 2;
1976 do_branch(dc, target, insn, 0, cpu_cond);
1977 goto jmp_insn;
1979 case 0x6: /* FBN+x */
1981 if (gen_trap_ifnofpu(dc, cpu_cond))
1982 goto jmp_insn;
1983 target = GET_FIELD(insn, 10, 31);
1984 target = sign_extend(target, 22);
1985 target <<= 2;
1986 do_fbranch(dc, target, insn, 0, cpu_cond);
1987 goto jmp_insn;
1989 case 0x4: /* SETHI */
1990 if (rd) { // nop
1991 uint32_t value = GET_FIELD(insn, 10, 31);
1992 TCGv r_const;
1994 r_const = tcg_const_tl(value << 10);
1995 gen_movl_TN_reg(rd, r_const);
1996 tcg_temp_free(r_const);
1998 break;
1999 case 0x0: /* UNIMPL */
2000 default:
2001 goto illegal_insn;
2003 break;
2005 break;
2006 case 1:
2007 /*CALL*/ {
2008 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2009 TCGv r_const;
2011 r_const = tcg_const_tl(dc->pc);
2012 gen_movl_TN_reg(15, r_const);
2013 tcg_temp_free(r_const);
2014 target += dc->pc;
2015 gen_mov_pc_npc(dc, cpu_cond);
2016 dc->npc = target;
2018 goto jmp_insn;
2019 case 2: /* FPU & Logical Operations */
2021 unsigned int xop = GET_FIELD(insn, 7, 12);
2022 if (xop == 0x3a) { /* generate trap */
2023 int cond;
2025 cpu_src1 = get_src1(insn, cpu_src1);
2026 if (IS_IMM) {
2027 rs2 = GET_FIELD(insn, 25, 31);
2028 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2029 } else {
2030 rs2 = GET_FIELD(insn, 27, 31);
2031 if (rs2 != 0) {
2032 gen_movl_reg_TN(rs2, cpu_src2);
2033 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2034 } else
2035 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2037 cond = GET_FIELD(insn, 3, 6);
2038 if (cond == 0x8) {
2039 save_state(dc, cpu_cond);
2040 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2041 } else if (cond != 0) {
2042 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2043 #ifdef TARGET_SPARC64
2044 /* V9 icc/xcc */
2045 int cc = GET_FIELD_SP(insn, 11, 12);
2047 save_state(dc, cpu_cond);
2048 if (cc == 0)
2049 gen_cond(r_cond, 0, cond);
2050 else if (cc == 2)
2051 gen_cond(r_cond, 1, cond);
2052 else
2053 goto illegal_insn;
2054 #else
2055 save_state(dc, cpu_cond);
2056 gen_cond(r_cond, 0, cond);
2057 #endif
2058 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2059 tcg_temp_free(r_cond);
2061 gen_op_next_insn();
2062 tcg_gen_exit_tb(0);
2063 dc->is_br = 1;
2064 goto jmp_insn;
2065 } else if (xop == 0x28) {
2066 rs1 = GET_FIELD(insn, 13, 17);
2067 switch(rs1) {
2068 case 0: /* rdy */
2069 #ifndef TARGET_SPARC64
2070 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2071 manual, rdy on the microSPARC
2072 II */
2073 case 0x0f: /* stbar in the SPARCv8 manual,
2074 rdy on the microSPARC II */
2075 case 0x10 ... 0x1f: /* implementation-dependent in the
2076 SPARCv8 manual, rdy on the
2077 microSPARC II */
2078 #endif
2079 gen_movl_TN_reg(rd, cpu_y);
2080 break;
2081 #ifdef TARGET_SPARC64
2082 case 0x2: /* V9 rdccr */
2083 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2084 gen_movl_TN_reg(rd, cpu_dst);
2085 break;
2086 case 0x3: /* V9 rdasi */
2087 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2088 gen_movl_TN_reg(rd, cpu_dst);
2089 break;
2090 case 0x4: /* V9 rdtick */
2092 TCGv r_tickptr;
2094 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2095 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2096 offsetof(CPUState, tick));
2097 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2098 r_tickptr);
2099 tcg_temp_free(r_tickptr);
2100 gen_movl_TN_reg(rd, cpu_dst);
2102 break;
2103 case 0x5: /* V9 rdpc */
2105 TCGv r_const;
2107 r_const = tcg_const_tl(dc->pc);
2108 gen_movl_TN_reg(rd, r_const);
2109 tcg_temp_free(r_const);
2111 break;
2112 case 0x6: /* V9 rdfprs */
2113 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2114 gen_movl_TN_reg(rd, cpu_dst);
2115 break;
2116 case 0xf: /* V9 membar */
2117 break; /* no effect */
2118 case 0x13: /* Graphics Status */
2119 if (gen_trap_ifnofpu(dc, cpu_cond))
2120 goto jmp_insn;
2121 gen_movl_TN_reg(rd, cpu_gsr);
2122 break;
2123 case 0x17: /* Tick compare */
2124 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2125 break;
2126 case 0x18: /* System tick */
2128 TCGv r_tickptr;
2130 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2131 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2132 offsetof(CPUState, stick));
2133 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2134 r_tickptr);
2135 tcg_temp_free(r_tickptr);
2136 gen_movl_TN_reg(rd, cpu_dst);
2138 break;
2139 case 0x19: /* System tick compare */
2140 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2141 break;
2142 case 0x10: /* Performance Control */
2143 case 0x11: /* Performance Instrumentation Counter */
2144 case 0x12: /* Dispatch Control */
2145 case 0x14: /* Softint set, WO */
2146 case 0x15: /* Softint clear, WO */
2147 case 0x16: /* Softint write */
2148 #endif
2149 default:
2150 goto illegal_insn;
2152 #if !defined(CONFIG_USER_ONLY)
2153 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2154 #ifndef TARGET_SPARC64
2155 if (!supervisor(dc))
2156 goto priv_insn;
2157 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2158 #else
2159 CHECK_IU_FEATURE(dc, HYPV);
2160 if (!hypervisor(dc))
2161 goto priv_insn;
2162 rs1 = GET_FIELD(insn, 13, 17);
2163 switch (rs1) {
2164 case 0: // hpstate
2165 // gen_op_rdhpstate();
2166 break;
2167 case 1: // htstate
2168 // gen_op_rdhtstate();
2169 break;
2170 case 3: // hintp
2171 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2172 break;
2173 case 5: // htba
2174 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2175 break;
2176 case 6: // hver
2177 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2178 break;
2179 case 31: // hstick_cmpr
2180 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2181 break;
2182 default:
2183 goto illegal_insn;
2185 #endif
2186 gen_movl_TN_reg(rd, cpu_dst);
2187 break;
2188 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2189 if (!supervisor(dc))
2190 goto priv_insn;
2191 #ifdef TARGET_SPARC64
2192 rs1 = GET_FIELD(insn, 13, 17);
2193 switch (rs1) {
2194 case 0: // tpc
2196 TCGv r_tsptr;
2198 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2199 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2200 offsetof(CPUState, tsptr));
2201 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2202 offsetof(trap_state, tpc));
2203 tcg_temp_free(r_tsptr);
2205 break;
2206 case 1: // tnpc
2208 TCGv r_tsptr;
2210 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2211 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2212 offsetof(CPUState, tsptr));
2213 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2214 offsetof(trap_state, tnpc));
2215 tcg_temp_free(r_tsptr);
2217 break;
2218 case 2: // tstate
2220 TCGv r_tsptr;
2222 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2223 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2224 offsetof(CPUState, tsptr));
2225 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2226 offsetof(trap_state, tstate));
2227 tcg_temp_free(r_tsptr);
2229 break;
2230 case 3: // tt
2232 TCGv r_tsptr;
2234 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2235 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2236 offsetof(CPUState, tsptr));
2237 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2238 offsetof(trap_state, tt));
2239 tcg_temp_free(r_tsptr);
2241 break;
2242 case 4: // tick
2244 TCGv r_tickptr;
2246 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2247 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2248 offsetof(CPUState, tick));
2249 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2250 r_tickptr);
2251 gen_movl_TN_reg(rd, cpu_tmp0);
2252 tcg_temp_free(r_tickptr);
2254 break;
2255 case 5: // tba
2256 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2257 break;
2258 case 6: // pstate
2259 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2260 offsetof(CPUSPARCState, pstate));
2261 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2262 break;
2263 case 7: // tl
2264 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2265 offsetof(CPUSPARCState, tl));
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2267 break;
2268 case 8: // pil
2269 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2270 offsetof(CPUSPARCState, psrpil));
2271 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2272 break;
2273 case 9: // cwp
2274 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2275 break;
2276 case 10: // cansave
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, cansave));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2280 break;
2281 case 11: // canrestore
2282 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2283 offsetof(CPUSPARCState, canrestore));
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2285 break;
2286 case 12: // cleanwin
2287 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2288 offsetof(CPUSPARCState, cleanwin));
2289 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2290 break;
2291 case 13: // otherwin
2292 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2293 offsetof(CPUSPARCState, otherwin));
2294 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2295 break;
2296 case 14: // wstate
2297 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2298 offsetof(CPUSPARCState, wstate));
2299 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2300 break;
2301 case 16: // UA2005 gl
2302 CHECK_IU_FEATURE(dc, GL);
2303 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2304 offsetof(CPUSPARCState, gl));
2305 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2306 break;
2307 case 26: // UA2005 strand status
2308 CHECK_IU_FEATURE(dc, HYPV);
2309 if (!hypervisor(dc))
2310 goto priv_insn;
2311 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2312 break;
2313 case 31: // ver
2314 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2315 break;
2316 case 15: // fq
2317 default:
2318 goto illegal_insn;
2320 #else
2321 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2322 #endif
2323 gen_movl_TN_reg(rd, cpu_tmp0);
2324 break;
2325 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2326 #ifdef TARGET_SPARC64
2327 save_state(dc, cpu_cond);
2328 tcg_gen_helper_0_0(helper_flushw);
2329 #else
2330 if (!supervisor(dc))
2331 goto priv_insn;
2332 gen_movl_TN_reg(rd, cpu_tbr);
2333 #endif
2334 break;
2335 #endif
2336 } else if (xop == 0x34) { /* FPU Operations */
2337 if (gen_trap_ifnofpu(dc, cpu_cond))
2338 goto jmp_insn;
2339 gen_op_clear_ieee_excp_and_FTT();
2340 rs1 = GET_FIELD(insn, 13, 17);
2341 rs2 = GET_FIELD(insn, 27, 31);
2342 xop = GET_FIELD(insn, 18, 26);
2343 switch (xop) {
2344 case 0x1: /* fmovs */
2345 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2346 break;
2347 case 0x5: /* fnegs */
2348 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2349 cpu_fpr[rs2]);
2350 break;
2351 case 0x9: /* fabss */
2352 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2353 cpu_fpr[rs2]);
2354 break;
2355 case 0x29: /* fsqrts */
2356 CHECK_FPU_FEATURE(dc, FSQRT);
2357 gen_clear_float_exceptions();
2358 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2359 cpu_fpr[rs2]);
2360 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2362 break;
2363 case 0x2a: /* fsqrtd */
2364 CHECK_FPU_FEATURE(dc, FSQRT);
2365 gen_op_load_fpr_DT1(DFPREG(rs2));
2366 gen_clear_float_exceptions();
2367 tcg_gen_helper_0_0(helper_fsqrtd);
2368 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2369 gen_op_store_DT0_fpr(DFPREG(rd));
2370 break;
2371 case 0x2b: /* fsqrtq */
2372 CHECK_FPU_FEATURE(dc, FLOAT128);
2373 gen_op_load_fpr_QT1(QFPREG(rs2));
2374 gen_clear_float_exceptions();
2375 tcg_gen_helper_0_0(helper_fsqrtq);
2376 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2377 gen_op_store_QT0_fpr(QFPREG(rd));
2378 break;
2379 case 0x41: /* fadds */
2380 gen_clear_float_exceptions();
2381 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2382 cpu_fpr[rs1], cpu_fpr[rs2]);
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2385 break;
2386 case 0x42:
2387 gen_op_load_fpr_DT0(DFPREG(rs1));
2388 gen_op_load_fpr_DT1(DFPREG(rs2));
2389 gen_clear_float_exceptions();
2390 tcg_gen_helper_0_0(helper_faddd);
2391 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2392 gen_op_store_DT0_fpr(DFPREG(rd));
2393 break;
2394 case 0x43: /* faddq */
2395 CHECK_FPU_FEATURE(dc, FLOAT128);
2396 gen_op_load_fpr_QT0(QFPREG(rs1));
2397 gen_op_load_fpr_QT1(QFPREG(rs2));
2398 gen_clear_float_exceptions();
2399 tcg_gen_helper_0_0(helper_faddq);
2400 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2401 gen_op_store_QT0_fpr(QFPREG(rd));
2402 break;
2403 case 0x45: /* fsubs */
2404 gen_clear_float_exceptions();
2405 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2406 cpu_fpr[rs1], cpu_fpr[rs2]);
2407 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2408 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2409 break;
2410 case 0x46:
2411 gen_op_load_fpr_DT0(DFPREG(rs1));
2412 gen_op_load_fpr_DT1(DFPREG(rs2));
2413 gen_clear_float_exceptions();
2414 tcg_gen_helper_0_0(helper_fsubd);
2415 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2416 gen_op_store_DT0_fpr(DFPREG(rd));
2417 break;
2418 case 0x47: /* fsubq */
2419 CHECK_FPU_FEATURE(dc, FLOAT128);
2420 gen_op_load_fpr_QT0(QFPREG(rs1));
2421 gen_op_load_fpr_QT1(QFPREG(rs2));
2422 gen_clear_float_exceptions();
2423 tcg_gen_helper_0_0(helper_fsubq);
2424 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2425 gen_op_store_QT0_fpr(QFPREG(rd));
2426 break;
2427 case 0x49: /* fmuls */
2428 CHECK_FPU_FEATURE(dc, FMUL);
2429 gen_clear_float_exceptions();
2430 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2431 cpu_fpr[rs1], cpu_fpr[rs2]);
2432 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2433 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2434 break;
2435 case 0x4a: /* fmuld */
2436 CHECK_FPU_FEATURE(dc, FMUL);
2437 gen_op_load_fpr_DT0(DFPREG(rs1));
2438 gen_op_load_fpr_DT1(DFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_fmuld);
2441 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2442 gen_op_store_DT0_fpr(DFPREG(rd));
2443 break;
2444 case 0x4b: /* fmulq */
2445 CHECK_FPU_FEATURE(dc, FLOAT128);
2446 CHECK_FPU_FEATURE(dc, FMUL);
2447 gen_op_load_fpr_QT0(QFPREG(rs1));
2448 gen_op_load_fpr_QT1(QFPREG(rs2));
2449 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_fmulq);
2451 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2452 gen_op_store_QT0_fpr(QFPREG(rd));
2453 break;
2454 case 0x4d: /* fdivs */
2455 gen_clear_float_exceptions();
2456 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2457 cpu_fpr[rs1], cpu_fpr[rs2]);
2458 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2459 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2460 break;
2461 case 0x4e:
2462 gen_op_load_fpr_DT0(DFPREG(rs1));
2463 gen_op_load_fpr_DT1(DFPREG(rs2));
2464 gen_clear_float_exceptions();
2465 tcg_gen_helper_0_0(helper_fdivd);
2466 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2467 gen_op_store_DT0_fpr(DFPREG(rd));
2468 break;
2469 case 0x4f: /* fdivq */
2470 CHECK_FPU_FEATURE(dc, FLOAT128);
2471 gen_op_load_fpr_QT0(QFPREG(rs1));
2472 gen_op_load_fpr_QT1(QFPREG(rs2));
2473 gen_clear_float_exceptions();
2474 tcg_gen_helper_0_0(helper_fdivq);
2475 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2476 gen_op_store_QT0_fpr(QFPREG(rd));
2477 break;
2478 case 0x69:
2479 CHECK_FPU_FEATURE(dc, FSMULD);
2480 gen_op_load_fpr_FT0(rs1);
2481 gen_op_load_fpr_FT1(rs2);
2482 gen_clear_float_exceptions();
2483 tcg_gen_helper_0_0(helper_fsmuld);
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 gen_op_store_DT0_fpr(DFPREG(rd));
2486 break;
2487 case 0x6e: /* fdmulq */
2488 CHECK_FPU_FEATURE(dc, FLOAT128);
2489 gen_op_load_fpr_DT0(DFPREG(rs1));
2490 gen_op_load_fpr_DT1(DFPREG(rs2));
2491 gen_clear_float_exceptions();
2492 tcg_gen_helper_0_0(helper_fdmulq);
2493 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2494 gen_op_store_QT0_fpr(QFPREG(rd));
2495 break;
2496 case 0xc4: /* fitos */
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2499 cpu_fpr[rs2]);
2500 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2501 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2502 break;
2503 case 0xc6:
2504 gen_op_load_fpr_DT1(DFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 tcg_gen_helper_0_0(helper_fdtos);
2507 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2508 gen_op_store_FT0_fpr(rd);
2509 break;
2510 case 0xc7: /* fqtos */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2514 tcg_gen_helper_0_0(helper_fqtos);
2515 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2516 gen_op_store_FT0_fpr(rd);
2517 break;
2518 case 0xc8:
2519 gen_op_load_fpr_FT1(rs2);
2520 tcg_gen_helper_0_0(helper_fitod);
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2522 break;
2523 case 0xc9:
2524 gen_op_load_fpr_FT1(rs2);
2525 tcg_gen_helper_0_0(helper_fstod);
2526 gen_op_store_DT0_fpr(DFPREG(rd));
2527 break;
2528 case 0xcb: /* fqtod */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_QT1(QFPREG(rs2));
2531 gen_clear_float_exceptions();
2532 tcg_gen_helper_0_0(helper_fqtod);
2533 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2534 gen_op_store_DT0_fpr(DFPREG(rd));
2535 break;
2536 case 0xcc: /* fitoq */
2537 CHECK_FPU_FEATURE(dc, FLOAT128);
2538 gen_op_load_fpr_FT1(rs2);
2539 tcg_gen_helper_0_0(helper_fitoq);
2540 gen_op_store_QT0_fpr(QFPREG(rd));
2541 break;
2542 case 0xcd: /* fstoq */
2543 CHECK_FPU_FEATURE(dc, FLOAT128);
2544 gen_op_load_fpr_FT1(rs2);
2545 tcg_gen_helper_0_0(helper_fstoq);
2546 gen_op_store_QT0_fpr(QFPREG(rd));
2547 break;
2548 case 0xce: /* fdtoq */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 gen_op_load_fpr_DT1(DFPREG(rs2));
2551 tcg_gen_helper_0_0(helper_fdtoq);
2552 gen_op_store_QT0_fpr(QFPREG(rd));
2553 break;
2554 case 0xd1: /* fstoi */
2555 gen_clear_float_exceptions();
2556 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2557 cpu_fpr[rs2]);
2558 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2559 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2560 break;
2561 case 0xd2:
2562 gen_op_load_fpr_DT1(DFPREG(rs2));
2563 gen_clear_float_exceptions();
2564 tcg_gen_helper_0_0(helper_fdtoi);
2565 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2566 gen_op_store_FT0_fpr(rd);
2567 break;
2568 case 0xd3: /* fqtoi */
2569 CHECK_FPU_FEATURE(dc, FLOAT128);
2570 gen_op_load_fpr_QT1(QFPREG(rs2));
2571 gen_clear_float_exceptions();
2572 tcg_gen_helper_0_0(helper_fqtoi);
2573 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2574 gen_op_store_FT0_fpr(rd);
2575 break;
2576 #ifdef TARGET_SPARC64
2577 case 0x2: /* V9 fmovd */
2578 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2579 cpu_fpr[DFPREG(rs2)]);
2580 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2581 cpu_fpr[DFPREG(rs2) + 1]);
2582 break;
2583 case 0x3: /* V9 fmovq */
2584 CHECK_FPU_FEATURE(dc, FLOAT128);
2585 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2586 cpu_fpr[QFPREG(rs2)]);
2587 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2588 cpu_fpr[QFPREG(rs2) + 1]);
2589 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2590 cpu_fpr[QFPREG(rs2) + 2]);
2591 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2592 cpu_fpr[QFPREG(rs2) + 3]);
2593 break;
2594 case 0x6: /* V9 fnegd */
2595 gen_op_load_fpr_DT1(DFPREG(rs2));
2596 tcg_gen_helper_0_0(helper_fnegd);
2597 gen_op_store_DT0_fpr(DFPREG(rd));
2598 break;
2599 case 0x7: /* V9 fnegq */
2600 CHECK_FPU_FEATURE(dc, FLOAT128);
2601 gen_op_load_fpr_QT1(QFPREG(rs2));
2602 tcg_gen_helper_0_0(helper_fnegq);
2603 gen_op_store_QT0_fpr(QFPREG(rd));
2604 break;
2605 case 0xa: /* V9 fabsd */
2606 gen_op_load_fpr_DT1(DFPREG(rs2));
2607 tcg_gen_helper_0_0(helper_fabsd);
2608 gen_op_store_DT0_fpr(DFPREG(rd));
2609 break;
2610 case 0xb: /* V9 fabsq */
2611 CHECK_FPU_FEATURE(dc, FLOAT128);
2612 gen_op_load_fpr_QT1(QFPREG(rs2));
2613 tcg_gen_helper_0_0(helper_fabsq);
2614 gen_op_store_QT0_fpr(QFPREG(rd));
2615 break;
2616 case 0x81: /* V9 fstox */
2617 gen_op_load_fpr_FT1(rs2);
2618 gen_clear_float_exceptions();
2619 tcg_gen_helper_0_0(helper_fstox);
2620 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2621 gen_op_store_DT0_fpr(DFPREG(rd));
2622 break;
2623 case 0x82: /* V9 fdtox */
2624 gen_op_load_fpr_DT1(DFPREG(rs2));
2625 gen_clear_float_exceptions();
2626 tcg_gen_helper_0_0(helper_fdtox);
2627 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2628 gen_op_store_DT0_fpr(DFPREG(rd));
2629 break;
2630 case 0x83: /* V9 fqtox */
2631 CHECK_FPU_FEATURE(dc, FLOAT128);
2632 gen_op_load_fpr_QT1(QFPREG(rs2));
2633 gen_clear_float_exceptions();
2634 tcg_gen_helper_0_0(helper_fqtox);
2635 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2636 gen_op_store_DT0_fpr(DFPREG(rd));
2637 break;
2638 case 0x84: /* V9 fxtos */
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 gen_clear_float_exceptions();
2641 tcg_gen_helper_0_0(helper_fxtos);
2642 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2643 gen_op_store_FT0_fpr(rd);
2644 break;
2645 case 0x88: /* V9 fxtod */
2646 gen_op_load_fpr_DT1(DFPREG(rs2));
2647 gen_clear_float_exceptions();
2648 tcg_gen_helper_0_0(helper_fxtod);
2649 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2650 gen_op_store_DT0_fpr(DFPREG(rd));
2651 break;
2652 case 0x8c: /* V9 fxtoq */
2653 CHECK_FPU_FEATURE(dc, FLOAT128);
2654 gen_op_load_fpr_DT1(DFPREG(rs2));
2655 gen_clear_float_exceptions();
2656 tcg_gen_helper_0_0(helper_fxtoq);
2657 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2658 gen_op_store_QT0_fpr(QFPREG(rd));
2659 break;
2660 #endif
2661 default:
2662 goto illegal_insn;
2664 } else if (xop == 0x35) { /* FPU Operations */
2665 #ifdef TARGET_SPARC64
2666 int cond;
2667 #endif
2668 if (gen_trap_ifnofpu(dc, cpu_cond))
2669 goto jmp_insn;
2670 gen_op_clear_ieee_excp_and_FTT();
2671 rs1 = GET_FIELD(insn, 13, 17);
2672 rs2 = GET_FIELD(insn, 27, 31);
2673 xop = GET_FIELD(insn, 18, 26);
2674 #ifdef TARGET_SPARC64
2675 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2676 int l1;
2678 l1 = gen_new_label();
2679 cond = GET_FIELD_SP(insn, 14, 17);
2680 cpu_src1 = get_src1(insn, cpu_src1);
2681 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2682 0, l1);
2683 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2684 gen_set_label(l1);
2685 break;
2686 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2687 int l1;
2689 l1 = gen_new_label();
2690 cond = GET_FIELD_SP(insn, 14, 17);
2691 cpu_src1 = get_src1(insn, cpu_src1);
2692 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2693 0, l1);
2694 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2695 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2696 gen_set_label(l1);
2697 break;
2698 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2699 int l1;
2701 CHECK_FPU_FEATURE(dc, FLOAT128);
2702 l1 = gen_new_label();
2703 cond = GET_FIELD_SP(insn, 14, 17);
2704 cpu_src1 = get_src1(insn, cpu_src1);
2705 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2706 0, l1);
2707 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2708 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2709 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2710 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2711 gen_set_label(l1);
2712 break;
2714 #endif
2715 switch (xop) {
2716 #ifdef TARGET_SPARC64
2717 #define FMOVSCC(fcc) \
2719 TCGv r_cond; \
2720 int l1; \
2722 l1 = gen_new_label(); \
2723 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2724 cond = GET_FIELD_SP(insn, 14, 17); \
2725 gen_fcond(r_cond, fcc, cond); \
2726 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2727 0, l1); \
2728 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2729 gen_set_label(l1); \
2730 tcg_temp_free(r_cond); \
2732 #define FMOVDCC(fcc) \
2734 TCGv r_cond; \
2735 int l1; \
2737 l1 = gen_new_label(); \
2738 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2739 cond = GET_FIELD_SP(insn, 14, 17); \
2740 gen_fcond(r_cond, fcc, cond); \
2741 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2742 0, l1); \
2743 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2744 cpu_fpr[DFPREG(rs2)]); \
2745 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2746 cpu_fpr[DFPREG(rs2) + 1]); \
2747 gen_set_label(l1); \
2748 tcg_temp_free(r_cond); \
2750 #define FMOVQCC(fcc) \
2752 TCGv r_cond; \
2753 int l1; \
2755 l1 = gen_new_label(); \
2756 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2757 cond = GET_FIELD_SP(insn, 14, 17); \
2758 gen_fcond(r_cond, fcc, cond); \
2759 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2760 0, l1); \
2761 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2762 cpu_fpr[QFPREG(rs2)]); \
2763 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2764 cpu_fpr[QFPREG(rs2) + 1]); \
2765 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2766 cpu_fpr[QFPREG(rs2) + 2]); \
2767 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2768 cpu_fpr[QFPREG(rs2) + 3]); \
2769 gen_set_label(l1); \
2770 tcg_temp_free(r_cond); \
2772 case 0x001: /* V9 fmovscc %fcc0 */
2773 FMOVSCC(0);
2774 break;
2775 case 0x002: /* V9 fmovdcc %fcc0 */
2776 FMOVDCC(0);
2777 break;
2778 case 0x003: /* V9 fmovqcc %fcc0 */
2779 CHECK_FPU_FEATURE(dc, FLOAT128);
2780 FMOVQCC(0);
2781 break;
2782 case 0x041: /* V9 fmovscc %fcc1 */
2783 FMOVSCC(1);
2784 break;
2785 case 0x042: /* V9 fmovdcc %fcc1 */
2786 FMOVDCC(1);
2787 break;
2788 case 0x043: /* V9 fmovqcc %fcc1 */
2789 CHECK_FPU_FEATURE(dc, FLOAT128);
2790 FMOVQCC(1);
2791 break;
2792 case 0x081: /* V9 fmovscc %fcc2 */
2793 FMOVSCC(2);
2794 break;
2795 case 0x082: /* V9 fmovdcc %fcc2 */
2796 FMOVDCC(2);
2797 break;
2798 case 0x083: /* V9 fmovqcc %fcc2 */
2799 CHECK_FPU_FEATURE(dc, FLOAT128);
2800 FMOVQCC(2);
2801 break;
2802 case 0x0c1: /* V9 fmovscc %fcc3 */
2803 FMOVSCC(3);
2804 break;
2805 case 0x0c2: /* V9 fmovdcc %fcc3 */
2806 FMOVDCC(3);
2807 break;
2808 case 0x0c3: /* V9 fmovqcc %fcc3 */
2809 CHECK_FPU_FEATURE(dc, FLOAT128);
2810 FMOVQCC(3);
2811 break;
2812 #undef FMOVSCC
2813 #undef FMOVDCC
2814 #undef FMOVQCC
2815 #define FMOVCC(size_FDQ, icc) \
2817 TCGv r_cond; \
2818 int l1; \
2820 l1 = gen_new_label(); \
2821 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2822 cond = GET_FIELD_SP(insn, 14, 17); \
2823 gen_cond(r_cond, icc, cond); \
2824 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2825 0, l1); \
2826 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2827 (glue(size_FDQ, FPREG(rs2))); \
2828 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2829 (glue(size_FDQ, FPREG(rd))); \
2830 gen_set_label(l1); \
2831 tcg_temp_free(r_cond); \
2833 #define FMOVSCC(icc) \
2835 TCGv r_cond; \
2836 int l1; \
2838 l1 = gen_new_label(); \
2839 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2840 cond = GET_FIELD_SP(insn, 14, 17); \
2841 gen_cond(r_cond, icc, cond); \
2842 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2843 0, l1); \
2844 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2845 gen_set_label(l1); \
2846 tcg_temp_free(r_cond); \
2848 #define FMOVDCC(icc) \
2850 TCGv r_cond; \
2851 int l1; \
2853 l1 = gen_new_label(); \
2854 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2855 cond = GET_FIELD_SP(insn, 14, 17); \
2856 gen_cond(r_cond, icc, cond); \
2857 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2858 0, l1); \
2859 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2860 cpu_fpr[DFPREG(rs2)]); \
2861 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2862 cpu_fpr[DFPREG(rs2) + 1]); \
2863 gen_set_label(l1); \
2864 tcg_temp_free(r_cond); \
2866 #define FMOVQCC(icc) \
2868 TCGv r_cond; \
2869 int l1; \
2871 l1 = gen_new_label(); \
2872 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2873 cond = GET_FIELD_SP(insn, 14, 17); \
2874 gen_cond(r_cond, icc, cond); \
2875 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2876 0, l1); \
2877 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2878 cpu_fpr[QFPREG(rs2)]); \
2879 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2880 cpu_fpr[QFPREG(rs2) + 1]); \
2881 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2882 cpu_fpr[QFPREG(rs2) + 2]); \
2883 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2884 cpu_fpr[QFPREG(rs2) + 3]); \
2885 gen_set_label(l1); \
2886 tcg_temp_free(r_cond); \
2889 case 0x101: /* V9 fmovscc %icc */
2890 FMOVSCC(0);
2891 break;
2892 case 0x102: /* V9 fmovdcc %icc */
2893 FMOVDCC(0);
2894 case 0x103: /* V9 fmovqcc %icc */
2895 CHECK_FPU_FEATURE(dc, FLOAT128);
2896 FMOVQCC(0);
2897 break;
2898 case 0x181: /* V9 fmovscc %xcc */
2899 FMOVSCC(1);
2900 break;
2901 case 0x182: /* V9 fmovdcc %xcc */
2902 FMOVDCC(1);
2903 break;
2904 case 0x183: /* V9 fmovqcc %xcc */
2905 CHECK_FPU_FEATURE(dc, FLOAT128);
2906 FMOVQCC(1);
2907 break;
2908 #undef FMOVSCC
2909 #undef FMOVDCC
2910 #undef FMOVQCC
2911 #endif
2912 case 0x51: /* fcmps, V9 %fcc */
2913 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2914 break;
2915 case 0x52: /* fcmpd, V9 %fcc */
2916 gen_op_load_fpr_DT0(DFPREG(rs1));
2917 gen_op_load_fpr_DT1(DFPREG(rs2));
2918 gen_op_fcmpd(rd & 3);
2919 break;
2920 case 0x53: /* fcmpq, V9 %fcc */
2921 CHECK_FPU_FEATURE(dc, FLOAT128);
2922 gen_op_load_fpr_QT0(QFPREG(rs1));
2923 gen_op_load_fpr_QT1(QFPREG(rs2));
2924 gen_op_fcmpq(rd & 3);
2925 break;
2926 case 0x55: /* fcmpes, V9 %fcc */
2927 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2928 break;
2929 case 0x56: /* fcmped, V9 %fcc */
2930 gen_op_load_fpr_DT0(DFPREG(rs1));
2931 gen_op_load_fpr_DT1(DFPREG(rs2));
2932 gen_op_fcmped(rd & 3);
2933 break;
2934 case 0x57: /* fcmpeq, V9 %fcc */
2935 CHECK_FPU_FEATURE(dc, FLOAT128);
2936 gen_op_load_fpr_QT0(QFPREG(rs1));
2937 gen_op_load_fpr_QT1(QFPREG(rs2));
2938 gen_op_fcmpeq(rd & 3);
2939 break;
2940 default:
2941 goto illegal_insn;
2943 } else if (xop == 0x2) {
2944 // clr/mov shortcut
2946 rs1 = GET_FIELD(insn, 13, 17);
2947 if (rs1 == 0) {
2948 // or %g0, x, y -> mov T0, x; mov y, T0
2949 if (IS_IMM) { /* immediate */
2950 TCGv r_const;
2952 rs2 = GET_FIELDs(insn, 19, 31);
2953 r_const = tcg_const_tl((int)rs2);
2954 gen_movl_TN_reg(rd, r_const);
2955 tcg_temp_free(r_const);
2956 } else { /* register */
2957 rs2 = GET_FIELD(insn, 27, 31);
2958 gen_movl_reg_TN(rs2, cpu_dst);
2959 gen_movl_TN_reg(rd, cpu_dst);
2961 } else {
2962 cpu_src1 = get_src1(insn, cpu_src1);
2963 if (IS_IMM) { /* immediate */
2964 rs2 = GET_FIELDs(insn, 19, 31);
2965 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2966 gen_movl_TN_reg(rd, cpu_dst);
2967 } else { /* register */
2968 // or x, %g0, y -> mov T1, x; mov y, T1
2969 rs2 = GET_FIELD(insn, 27, 31);
2970 if (rs2 != 0) {
2971 gen_movl_reg_TN(rs2, cpu_src2);
2972 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2973 gen_movl_TN_reg(rd, cpu_dst);
2974 } else
2975 gen_movl_TN_reg(rd, cpu_src1);
2978 #ifdef TARGET_SPARC64
2979 } else if (xop == 0x25) { /* sll, V9 sllx */
2980 cpu_src1 = get_src1(insn, cpu_src1);
2981 if (IS_IMM) { /* immediate */
2982 rs2 = GET_FIELDs(insn, 20, 31);
2983 if (insn & (1 << 12)) {
2984 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2985 } else {
2986 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2988 } else { /* register */
2989 rs2 = GET_FIELD(insn, 27, 31);
2990 gen_movl_reg_TN(rs2, cpu_src2);
2991 if (insn & (1 << 12)) {
2992 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2993 } else {
2994 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2996 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2998 gen_movl_TN_reg(rd, cpu_dst);
2999 } else if (xop == 0x26) { /* srl, V9 srlx */
3000 cpu_src1 = get_src1(insn, cpu_src1);
3001 if (IS_IMM) { /* immediate */
3002 rs2 = GET_FIELDs(insn, 20, 31);
3003 if (insn & (1 << 12)) {
3004 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3005 } else {
3006 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3007 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3009 } else { /* register */
3010 rs2 = GET_FIELD(insn, 27, 31);
3011 gen_movl_reg_TN(rs2, cpu_src2);
3012 if (insn & (1 << 12)) {
3013 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3014 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3015 } else {
3016 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3017 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3018 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3021 gen_movl_TN_reg(rd, cpu_dst);
3022 } else if (xop == 0x27) { /* sra, V9 srax */
3023 cpu_src1 = get_src1(insn, cpu_src1);
3024 if (IS_IMM) { /* immediate */
3025 rs2 = GET_FIELDs(insn, 20, 31);
3026 if (insn & (1 << 12)) {
3027 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3028 } else {
3029 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3030 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3031 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3033 } else { /* register */
3034 rs2 = GET_FIELD(insn, 27, 31);
3035 gen_movl_reg_TN(rs2, cpu_src2);
3036 if (insn & (1 << 12)) {
3037 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3038 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3039 } else {
3040 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3041 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3042 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3043 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3046 gen_movl_TN_reg(rd, cpu_dst);
3047 #endif
3048 } else if (xop < 0x36) {
3049 cpu_src1 = get_src1(insn, cpu_src1);
3050 cpu_src2 = get_src2(insn, cpu_src2);
3051 if (xop < 0x20) {
3052 switch (xop & ~0x10) {
3053 case 0x0:
3054 if (xop & 0x10)
3055 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3056 else
3057 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3058 break;
3059 case 0x1:
3060 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3061 if (xop & 0x10)
3062 gen_op_logic_cc(cpu_dst);
3063 break;
3064 case 0x2:
3065 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3066 if (xop & 0x10)
3067 gen_op_logic_cc(cpu_dst);
3068 break;
3069 case 0x3:
3070 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3071 if (xop & 0x10)
3072 gen_op_logic_cc(cpu_dst);
3073 break;
3074 case 0x4:
3075 if (xop & 0x10)
3076 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3077 else
3078 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3079 break;
3080 case 0x5:
3081 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3082 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3083 if (xop & 0x10)
3084 gen_op_logic_cc(cpu_dst);
3085 break;
3086 case 0x6:
3087 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3088 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3089 if (xop & 0x10)
3090 gen_op_logic_cc(cpu_dst);
3091 break;
3092 case 0x7:
3093 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3094 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3095 if (xop & 0x10)
3096 gen_op_logic_cc(cpu_dst);
3097 break;
3098 case 0x8:
3099 if (xop & 0x10)
3100 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3101 else {
3102 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3103 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3104 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3106 break;
3107 #ifdef TARGET_SPARC64
3108 case 0x9: /* V9 mulx */
3109 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3110 break;
3111 #endif
3112 case 0xa:
3113 CHECK_IU_FEATURE(dc, MUL);
3114 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3115 if (xop & 0x10)
3116 gen_op_logic_cc(cpu_dst);
3117 break;
3118 case 0xb:
3119 CHECK_IU_FEATURE(dc, MUL);
3120 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3121 if (xop & 0x10)
3122 gen_op_logic_cc(cpu_dst);
3123 break;
3124 case 0xc:
3125 if (xop & 0x10)
3126 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3127 else {
3128 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3129 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3130 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3132 break;
3133 #ifdef TARGET_SPARC64
3134 case 0xd: /* V9 udivx */
3135 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3136 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3137 gen_trap_ifdivzero_tl(cpu_cc_src2);
3138 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3139 break;
3140 #endif
3141 case 0xe:
3142 CHECK_IU_FEATURE(dc, DIV);
3143 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3144 cpu_src2);
3145 if (xop & 0x10)
3146 gen_op_div_cc(cpu_dst);
3147 break;
3148 case 0xf:
3149 CHECK_IU_FEATURE(dc, DIV);
3150 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3151 cpu_src2);
3152 if (xop & 0x10)
3153 gen_op_div_cc(cpu_dst);
3154 break;
3155 default:
3156 goto illegal_insn;
3158 gen_movl_TN_reg(rd, cpu_dst);
3159 } else {
3160 switch (xop) {
3161 case 0x20: /* taddcc */
3162 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3163 gen_movl_TN_reg(rd, cpu_dst);
3164 break;
3165 case 0x21: /* tsubcc */
3166 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3167 gen_movl_TN_reg(rd, cpu_dst);
3168 break;
3169 case 0x22: /* taddcctv */
3170 save_state(dc, cpu_cond);
3171 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3172 gen_movl_TN_reg(rd, cpu_dst);
3173 break;
3174 case 0x23: /* tsubcctv */
3175 save_state(dc, cpu_cond);
3176 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3177 gen_movl_TN_reg(rd, cpu_dst);
3178 break;
3179 case 0x24: /* mulscc */
3180 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3181 gen_movl_TN_reg(rd, cpu_dst);
3182 break;
3183 #ifndef TARGET_SPARC64
3184 case 0x25: /* sll */
3185 if (IS_IMM) { /* immediate */
3186 rs2 = GET_FIELDs(insn, 20, 31);
3187 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3188 } else { /* register */
3189 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3190 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3192 gen_movl_TN_reg(rd, cpu_dst);
3193 break;
3194 case 0x26: /* srl */
3195 if (IS_IMM) { /* immediate */
3196 rs2 = GET_FIELDs(insn, 20, 31);
3197 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3198 } else { /* register */
3199 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3200 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3202 gen_movl_TN_reg(rd, cpu_dst);
3203 break;
3204 case 0x27: /* sra */
3205 if (IS_IMM) { /* immediate */
3206 rs2 = GET_FIELDs(insn, 20, 31);
3207 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3208 } else { /* register */
3209 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3210 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3212 gen_movl_TN_reg(rd, cpu_dst);
3213 break;
3214 #endif
3215 case 0x30:
3217 switch(rd) {
3218 case 0: /* wry */
3219 tcg_gen_xor_tl(cpu_y, cpu_src1, cpu_src2);
3220 break;
3221 #ifndef TARGET_SPARC64
3222 case 0x01 ... 0x0f: /* undefined in the
3223 SPARCv8 manual, nop
3224 on the microSPARC
3225 II */
3226 case 0x10 ... 0x1f: /* implementation-dependent
3227 in the SPARCv8
3228 manual, nop on the
3229 microSPARC II */
3230 break;
3231 #else
3232 case 0x2: /* V9 wrccr */
3233 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3234 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3235 break;
3236 case 0x3: /* V9 wrasi */
3237 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3238 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3239 break;
3240 case 0x6: /* V9 wrfprs */
3241 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3242 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3243 save_state(dc, cpu_cond);
3244 gen_op_next_insn();
3245 tcg_gen_exit_tb(0);
3246 dc->is_br = 1;
3247 break;
3248 case 0xf: /* V9 sir, nop if user */
3249 #if !defined(CONFIG_USER_ONLY)
3250 if (supervisor(dc))
3251 ; // XXX
3252 #endif
3253 break;
3254 case 0x13: /* Graphics Status */
3255 if (gen_trap_ifnofpu(dc, cpu_cond))
3256 goto jmp_insn;
3257 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3258 break;
3259 case 0x17: /* Tick compare */
3260 #if !defined(CONFIG_USER_ONLY)
3261 if (!supervisor(dc))
3262 goto illegal_insn;
3263 #endif
3265 TCGv r_tickptr;
3267 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3268 cpu_src2);
3269 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3270 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3271 offsetof(CPUState, tick));
3272 tcg_gen_helper_0_2(helper_tick_set_limit,
3273 r_tickptr, cpu_tick_cmpr);
3274 tcg_temp_free(r_tickptr);
3276 break;
3277 case 0x18: /* System tick */
3278 #if !defined(CONFIG_USER_ONLY)
3279 if (!supervisor(dc))
3280 goto illegal_insn;
3281 #endif
3283 TCGv r_tickptr;
3285 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3286 cpu_src2);
3287 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3288 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3289 offsetof(CPUState, stick));
3290 tcg_gen_helper_0_2(helper_tick_set_count,
3291 r_tickptr, cpu_dst);
3292 tcg_temp_free(r_tickptr);
3294 break;
3295 case 0x19: /* System tick compare */
3296 #if !defined(CONFIG_USER_ONLY)
3297 if (!supervisor(dc))
3298 goto illegal_insn;
3299 #endif
3301 TCGv r_tickptr;
3303 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3304 cpu_src2);
3305 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3306 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3307 offsetof(CPUState, stick));
3308 tcg_gen_helper_0_2(helper_tick_set_limit,
3309 r_tickptr, cpu_stick_cmpr);
3310 tcg_temp_free(r_tickptr);
3312 break;
3314 case 0x10: /* Performance Control */
3315 case 0x11: /* Performance Instrumentation
3316 Counter */
3317 case 0x12: /* Dispatch Control */
3318 case 0x14: /* Softint set */
3319 case 0x15: /* Softint clear */
3320 case 0x16: /* Softint write */
3321 #endif
3322 default:
3323 goto illegal_insn;
3326 break;
3327 #if !defined(CONFIG_USER_ONLY)
3328 case 0x31: /* wrpsr, V9 saved, restored */
3330 if (!supervisor(dc))
3331 goto priv_insn;
3332 #ifdef TARGET_SPARC64
3333 switch (rd) {
3334 case 0:
3335 tcg_gen_helper_0_0(helper_saved);
3336 break;
3337 case 1:
3338 tcg_gen_helper_0_0(helper_restored);
3339 break;
3340 case 2: /* UA2005 allclean */
3341 case 3: /* UA2005 otherw */
3342 case 4: /* UA2005 normalw */
3343 case 5: /* UA2005 invalw */
3344 // XXX
3345 default:
3346 goto illegal_insn;
3348 #else
3349 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3350 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3351 save_state(dc, cpu_cond);
3352 gen_op_next_insn();
3353 tcg_gen_exit_tb(0);
3354 dc->is_br = 1;
3355 #endif
3357 break;
3358 case 0x32: /* wrwim, V9 wrpr */
3360 if (!supervisor(dc))
3361 goto priv_insn;
3362 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3363 #ifdef TARGET_SPARC64
3364 switch (rd) {
3365 case 0: // tpc
3367 TCGv r_tsptr;
3369 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3370 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3371 offsetof(CPUState, tsptr));
3372 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3373 offsetof(trap_state, tpc));
3374 tcg_temp_free(r_tsptr);
3376 break;
3377 case 1: // tnpc
3379 TCGv r_tsptr;
3381 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3382 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3383 offsetof(CPUState, tsptr));
3384 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3385 offsetof(trap_state, tnpc));
3386 tcg_temp_free(r_tsptr);
3388 break;
3389 case 2: // tstate
3391 TCGv r_tsptr;
3393 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3394 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3395 offsetof(CPUState, tsptr));
3396 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3397 offsetof(trap_state,
3398 tstate));
3399 tcg_temp_free(r_tsptr);
3401 break;
3402 case 3: // tt
3404 TCGv r_tsptr;
3406 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3407 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3408 offsetof(CPUState, tsptr));
3409 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3410 offsetof(trap_state, tt));
3411 tcg_temp_free(r_tsptr);
3413 break;
3414 case 4: // tick
3416 TCGv r_tickptr;
3418 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3419 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3420 offsetof(CPUState, tick));
3421 tcg_gen_helper_0_2(helper_tick_set_count,
3422 r_tickptr, cpu_tmp0);
3423 tcg_temp_free(r_tickptr);
3425 break;
3426 case 5: // tba
3427 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3428 break;
3429 case 6: // pstate
3430 save_state(dc, cpu_cond);
3431 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3432 gen_op_next_insn();
3433 tcg_gen_exit_tb(0);
3434 dc->is_br = 1;
3435 break;
3436 case 7: // tl
3437 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3438 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3439 offsetof(CPUSPARCState, tl));
3440 break;
3441 case 8: // pil
3442 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3443 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3444 offsetof(CPUSPARCState,
3445 psrpil));
3446 break;
3447 case 9: // cwp
3448 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3449 break;
3450 case 10: // cansave
3451 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3452 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3453 offsetof(CPUSPARCState,
3454 cansave));
3455 break;
3456 case 11: // canrestore
3457 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3458 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3459 offsetof(CPUSPARCState,
3460 canrestore));
3461 break;
3462 case 12: // cleanwin
3463 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3464 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3465 offsetof(CPUSPARCState,
3466 cleanwin));
3467 break;
3468 case 13: // otherwin
3469 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3470 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3471 offsetof(CPUSPARCState,
3472 otherwin));
3473 break;
3474 case 14: // wstate
3475 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3476 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3477 offsetof(CPUSPARCState,
3478 wstate));
3479 break;
3480 case 16: // UA2005 gl
3481 CHECK_IU_FEATURE(dc, GL);
3482 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3483 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3484 offsetof(CPUSPARCState, gl));
3485 break;
3486 case 26: // UA2005 strand status
3487 CHECK_IU_FEATURE(dc, HYPV);
3488 if (!hypervisor(dc))
3489 goto priv_insn;
3490 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3491 break;
3492 default:
3493 goto illegal_insn;
3495 #else
3496 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3497 if (dc->def->nwindows != 32)
3498 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3499 (1 << dc->def->nwindows) - 1);
3500 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3501 #endif
3503 break;
3504 case 0x33: /* wrtbr, UA2005 wrhpr */
3506 #ifndef TARGET_SPARC64
3507 if (!supervisor(dc))
3508 goto priv_insn;
3509 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3510 #else
3511 CHECK_IU_FEATURE(dc, HYPV);
3512 if (!hypervisor(dc))
3513 goto priv_insn;
3514 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3515 switch (rd) {
3516 case 0: // hpstate
3517 // XXX gen_op_wrhpstate();
3518 save_state(dc, cpu_cond);
3519 gen_op_next_insn();
3520 tcg_gen_exit_tb(0);
3521 dc->is_br = 1;
3522 break;
3523 case 1: // htstate
3524 // XXX gen_op_wrhtstate();
3525 break;
3526 case 3: // hintp
3527 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3528 break;
3529 case 5: // htba
3530 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3531 break;
3532 case 31: // hstick_cmpr
3534 TCGv r_tickptr;
3536 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3537 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3538 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3539 offsetof(CPUState, hstick));
3540 tcg_gen_helper_0_2(helper_tick_set_limit,
3541 r_tickptr, cpu_hstick_cmpr);
3542 tcg_temp_free(r_tickptr);
3544 break;
3545 case 6: // hver readonly
3546 default:
3547 goto illegal_insn;
3549 #endif
3551 break;
3552 #endif
3553 #ifdef TARGET_SPARC64
3554 case 0x2c: /* V9 movcc */
3556 int cc = GET_FIELD_SP(insn, 11, 12);
3557 int cond = GET_FIELD_SP(insn, 14, 17);
3558 TCGv r_cond;
3559 int l1;
3561 r_cond = tcg_temp_new(TCG_TYPE_TL);
3562 if (insn & (1 << 18)) {
3563 if (cc == 0)
3564 gen_cond(r_cond, 0, cond);
3565 else if (cc == 2)
3566 gen_cond(r_cond, 1, cond);
3567 else
3568 goto illegal_insn;
3569 } else {
3570 gen_fcond(r_cond, cc, cond);
3573 l1 = gen_new_label();
3575 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3576 if (IS_IMM) { /* immediate */
3577 TCGv r_const;
3579 rs2 = GET_FIELD_SPs(insn, 0, 10);
3580 r_const = tcg_const_tl((int)rs2);
3581 gen_movl_TN_reg(rd, r_const);
3582 tcg_temp_free(r_const);
3583 } else {
3584 rs2 = GET_FIELD_SP(insn, 0, 4);
3585 gen_movl_reg_TN(rs2, cpu_tmp0);
3586 gen_movl_TN_reg(rd, cpu_tmp0);
3588 gen_set_label(l1);
3589 tcg_temp_free(r_cond);
3590 break;
3592 case 0x2d: /* V9 sdivx */
3593 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3594 gen_movl_TN_reg(rd, cpu_dst);
3595 break;
3596 case 0x2e: /* V9 popc */
3598 cpu_src2 = get_src2(insn, cpu_src2);
3599 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3600 cpu_src2);
3601 gen_movl_TN_reg(rd, cpu_dst);
3603 case 0x2f: /* V9 movr */
3605 int cond = GET_FIELD_SP(insn, 10, 12);
3606 int l1;
3608 cpu_src1 = get_src1(insn, cpu_src1);
3610 l1 = gen_new_label();
3612 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3613 cpu_src1, 0, l1);
3614 if (IS_IMM) { /* immediate */
3615 TCGv r_const;
3617 rs2 = GET_FIELD_SPs(insn, 0, 9);
3618 r_const = tcg_const_tl((int)rs2);
3619 gen_movl_TN_reg(rd, r_const);
3620 tcg_temp_free(r_const);
3621 } else {
3622 rs2 = GET_FIELD_SP(insn, 0, 4);
3623 gen_movl_reg_TN(rs2, cpu_tmp0);
3624 gen_movl_TN_reg(rd, cpu_tmp0);
3626 gen_set_label(l1);
3627 break;
3629 #endif
3630 default:
3631 goto illegal_insn;
3634 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3635 #ifdef TARGET_SPARC64
3636 int opf = GET_FIELD_SP(insn, 5, 13);
3637 rs1 = GET_FIELD(insn, 13, 17);
3638 rs2 = GET_FIELD(insn, 27, 31);
3639 if (gen_trap_ifnofpu(dc, cpu_cond))
3640 goto jmp_insn;
3642 switch (opf) {
3643 case 0x000: /* VIS I edge8cc */
3644 case 0x001: /* VIS II edge8n */
3645 case 0x002: /* VIS I edge8lcc */
3646 case 0x003: /* VIS II edge8ln */
3647 case 0x004: /* VIS I edge16cc */
3648 case 0x005: /* VIS II edge16n */
3649 case 0x006: /* VIS I edge16lcc */
3650 case 0x007: /* VIS II edge16ln */
3651 case 0x008: /* VIS I edge32cc */
3652 case 0x009: /* VIS II edge32n */
3653 case 0x00a: /* VIS I edge32lcc */
3654 case 0x00b: /* VIS II edge32ln */
3655 // XXX
3656 goto illegal_insn;
3657 case 0x010: /* VIS I array8 */
3658 CHECK_FPU_FEATURE(dc, VIS1);
3659 cpu_src1 = get_src1(insn, cpu_src1);
3660 gen_movl_reg_TN(rs2, cpu_src2);
3661 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3662 cpu_src2);
3663 gen_movl_TN_reg(rd, cpu_dst);
3664 break;
3665 case 0x012: /* VIS I array16 */
3666 CHECK_FPU_FEATURE(dc, VIS1);
3667 cpu_src1 = get_src1(insn, cpu_src1);
3668 gen_movl_reg_TN(rs2, cpu_src2);
3669 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3670 cpu_src2);
3671 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3672 gen_movl_TN_reg(rd, cpu_dst);
3673 break;
3674 case 0x014: /* VIS I array32 */
3675 CHECK_FPU_FEATURE(dc, VIS1);
3676 cpu_src1 = get_src1(insn, cpu_src1);
3677 gen_movl_reg_TN(rs2, cpu_src2);
3678 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3679 cpu_src2);
3680 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3681 gen_movl_TN_reg(rd, cpu_dst);
3682 break;
3683 case 0x018: /* VIS I alignaddr */
3684 CHECK_FPU_FEATURE(dc, VIS1);
3685 cpu_src1 = get_src1(insn, cpu_src1);
3686 gen_movl_reg_TN(rs2, cpu_src2);
3687 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3688 cpu_src2);
3689 gen_movl_TN_reg(rd, cpu_dst);
3690 break;
3691 case 0x019: /* VIS II bmask */
3692 case 0x01a: /* VIS I alignaddrl */
3693 // XXX
3694 goto illegal_insn;
3695 case 0x020: /* VIS I fcmple16 */
3696 CHECK_FPU_FEATURE(dc, VIS1);
3697 gen_op_load_fpr_DT0(DFPREG(rs1));
3698 gen_op_load_fpr_DT1(DFPREG(rs2));
3699 tcg_gen_helper_0_0(helper_fcmple16);
3700 gen_op_store_DT0_fpr(DFPREG(rd));
3701 break;
3702 case 0x022: /* VIS I fcmpne16 */
3703 CHECK_FPU_FEATURE(dc, VIS1);
3704 gen_op_load_fpr_DT0(DFPREG(rs1));
3705 gen_op_load_fpr_DT1(DFPREG(rs2));
3706 tcg_gen_helper_0_0(helper_fcmpne16);
3707 gen_op_store_DT0_fpr(DFPREG(rd));
3708 break;
3709 case 0x024: /* VIS I fcmple32 */
3710 CHECK_FPU_FEATURE(dc, VIS1);
3711 gen_op_load_fpr_DT0(DFPREG(rs1));
3712 gen_op_load_fpr_DT1(DFPREG(rs2));
3713 tcg_gen_helper_0_0(helper_fcmple32);
3714 gen_op_store_DT0_fpr(DFPREG(rd));
3715 break;
3716 case 0x026: /* VIS I fcmpne32 */
3717 CHECK_FPU_FEATURE(dc, VIS1);
3718 gen_op_load_fpr_DT0(DFPREG(rs1));
3719 gen_op_load_fpr_DT1(DFPREG(rs2));
3720 tcg_gen_helper_0_0(helper_fcmpne32);
3721 gen_op_store_DT0_fpr(DFPREG(rd));
3722 break;
3723 case 0x028: /* VIS I fcmpgt16 */
3724 CHECK_FPU_FEATURE(dc, VIS1);
3725 gen_op_load_fpr_DT0(DFPREG(rs1));
3726 gen_op_load_fpr_DT1(DFPREG(rs2));
3727 tcg_gen_helper_0_0(helper_fcmpgt16);
3728 gen_op_store_DT0_fpr(DFPREG(rd));
3729 break;
3730 case 0x02a: /* VIS I fcmpeq16 */
3731 CHECK_FPU_FEATURE(dc, VIS1);
3732 gen_op_load_fpr_DT0(DFPREG(rs1));
3733 gen_op_load_fpr_DT1(DFPREG(rs2));
3734 tcg_gen_helper_0_0(helper_fcmpeq16);
3735 gen_op_store_DT0_fpr(DFPREG(rd));
3736 break;
3737 case 0x02c: /* VIS I fcmpgt32 */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 tcg_gen_helper_0_0(helper_fcmpgt32);
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3743 break;
3744 case 0x02e: /* VIS I fcmpeq32 */
3745 CHECK_FPU_FEATURE(dc, VIS1);
3746 gen_op_load_fpr_DT0(DFPREG(rs1));
3747 gen_op_load_fpr_DT1(DFPREG(rs2));
3748 tcg_gen_helper_0_0(helper_fcmpeq32);
3749 gen_op_store_DT0_fpr(DFPREG(rd));
3750 break;
3751 case 0x031: /* VIS I fmul8x16 */
3752 CHECK_FPU_FEATURE(dc, VIS1);
3753 gen_op_load_fpr_DT0(DFPREG(rs1));
3754 gen_op_load_fpr_DT1(DFPREG(rs2));
3755 tcg_gen_helper_0_0(helper_fmul8x16);
3756 gen_op_store_DT0_fpr(DFPREG(rd));
3757 break;
3758 case 0x033: /* VIS I fmul8x16au */
3759 CHECK_FPU_FEATURE(dc, VIS1);
3760 gen_op_load_fpr_DT0(DFPREG(rs1));
3761 gen_op_load_fpr_DT1(DFPREG(rs2));
3762 tcg_gen_helper_0_0(helper_fmul8x16au);
3763 gen_op_store_DT0_fpr(DFPREG(rd));
3764 break;
3765 case 0x035: /* VIS I fmul8x16al */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_op_load_fpr_DT0(DFPREG(rs1));
3768 gen_op_load_fpr_DT1(DFPREG(rs2));
3769 tcg_gen_helper_0_0(helper_fmul8x16al);
3770 gen_op_store_DT0_fpr(DFPREG(rd));
3771 break;
3772 case 0x036: /* VIS I fmul8sux16 */
3773 CHECK_FPU_FEATURE(dc, VIS1);
3774 gen_op_load_fpr_DT0(DFPREG(rs1));
3775 gen_op_load_fpr_DT1(DFPREG(rs2));
3776 tcg_gen_helper_0_0(helper_fmul8sux16);
3777 gen_op_store_DT0_fpr(DFPREG(rd));
3778 break;
3779 case 0x037: /* VIS I fmul8ulx16 */
3780 CHECK_FPU_FEATURE(dc, VIS1);
3781 gen_op_load_fpr_DT0(DFPREG(rs1));
3782 gen_op_load_fpr_DT1(DFPREG(rs2));
3783 tcg_gen_helper_0_0(helper_fmul8ulx16);
3784 gen_op_store_DT0_fpr(DFPREG(rd));
3785 break;
3786 case 0x038: /* VIS I fmuld8sux16 */
3787 CHECK_FPU_FEATURE(dc, VIS1);
3788 gen_op_load_fpr_DT0(DFPREG(rs1));
3789 gen_op_load_fpr_DT1(DFPREG(rs2));
3790 tcg_gen_helper_0_0(helper_fmuld8sux16);
3791 gen_op_store_DT0_fpr(DFPREG(rd));
3792 break;
3793 case 0x039: /* VIS I fmuld8ulx16 */
3794 CHECK_FPU_FEATURE(dc, VIS1);
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3799 break;
3800 case 0x03a: /* VIS I fpack32 */
3801 case 0x03b: /* VIS I fpack16 */
3802 case 0x03d: /* VIS I fpackfix */
3803 case 0x03e: /* VIS I pdist */
3804 // XXX
3805 goto illegal_insn;
3806 case 0x048: /* VIS I faligndata */
3807 CHECK_FPU_FEATURE(dc, VIS1);
3808 gen_op_load_fpr_DT0(DFPREG(rs1));
3809 gen_op_load_fpr_DT1(DFPREG(rs2));
3810 tcg_gen_helper_0_0(helper_faligndata);
3811 gen_op_store_DT0_fpr(DFPREG(rd));
3812 break;
3813 case 0x04b: /* VIS I fpmerge */
3814 CHECK_FPU_FEATURE(dc, VIS1);
3815 gen_op_load_fpr_DT0(DFPREG(rs1));
3816 gen_op_load_fpr_DT1(DFPREG(rs2));
3817 tcg_gen_helper_0_0(helper_fpmerge);
3818 gen_op_store_DT0_fpr(DFPREG(rd));
3819 break;
3820 case 0x04c: /* VIS II bshuffle */
3821 // XXX
3822 goto illegal_insn;
3823 case 0x04d: /* VIS I fexpand */
3824 CHECK_FPU_FEATURE(dc, VIS1);
3825 gen_op_load_fpr_DT0(DFPREG(rs1));
3826 gen_op_load_fpr_DT1(DFPREG(rs2));
3827 tcg_gen_helper_0_0(helper_fexpand);
3828 gen_op_store_DT0_fpr(DFPREG(rd));
3829 break;
3830 case 0x050: /* VIS I fpadd16 */
3831 CHECK_FPU_FEATURE(dc, VIS1);
3832 gen_op_load_fpr_DT0(DFPREG(rs1));
3833 gen_op_load_fpr_DT1(DFPREG(rs2));
3834 tcg_gen_helper_0_0(helper_fpadd16);
3835 gen_op_store_DT0_fpr(DFPREG(rd));
3836 break;
3837 case 0x051: /* VIS I fpadd16s */
3838 CHECK_FPU_FEATURE(dc, VIS1);
3839 gen_op_load_fpr_FT0(rs1);
3840 gen_op_load_fpr_FT1(rs2);
3841 tcg_gen_helper_0_0(helper_fpadd16s);
3842 gen_op_store_FT0_fpr(rd);
3843 break;
3844 case 0x052: /* VIS I fpadd32 */
3845 CHECK_FPU_FEATURE(dc, VIS1);
3846 gen_op_load_fpr_DT0(DFPREG(rs1));
3847 gen_op_load_fpr_DT1(DFPREG(rs2));
3848 tcg_gen_helper_0_0(helper_fpadd32);
3849 gen_op_store_DT0_fpr(DFPREG(rd));
3850 break;
3851 case 0x053: /* VIS I fpadd32s */
3852 CHECK_FPU_FEATURE(dc, VIS1);
3853 gen_op_load_fpr_FT0(rs1);
3854 gen_op_load_fpr_FT1(rs2);
3855 tcg_gen_helper_0_0(helper_fpadd32s);
3856 gen_op_store_FT0_fpr(rd);
3857 break;
3858 case 0x054: /* VIS I fpsub16 */
3859 CHECK_FPU_FEATURE(dc, VIS1);
3860 gen_op_load_fpr_DT0(DFPREG(rs1));
3861 gen_op_load_fpr_DT1(DFPREG(rs2));
3862 tcg_gen_helper_0_0(helper_fpsub16);
3863 gen_op_store_DT0_fpr(DFPREG(rd));
3864 break;
3865 case 0x055: /* VIS I fpsub16s */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 gen_op_load_fpr_FT0(rs1);
3868 gen_op_load_fpr_FT1(rs2);
3869 tcg_gen_helper_0_0(helper_fpsub16s);
3870 gen_op_store_FT0_fpr(rd);
3871 break;
3872 case 0x056: /* VIS I fpsub32 */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 gen_op_load_fpr_DT0(DFPREG(rs1));
3875 gen_op_load_fpr_DT1(DFPREG(rs2));
3876 tcg_gen_helper_0_0(helper_fpsub32);
3877 gen_op_store_DT0_fpr(DFPREG(rd));
3878 break;
3879 case 0x057: /* VIS I fpsub32s */
3880 CHECK_FPU_FEATURE(dc, VIS1);
3881 gen_op_load_fpr_FT0(rs1);
3882 gen_op_load_fpr_FT1(rs2);
3883 tcg_gen_helper_0_0(helper_fpsub32s);
3884 gen_op_store_FT0_fpr(rd);
3885 break;
3886 case 0x060: /* VIS I fzero */
3887 CHECK_FPU_FEATURE(dc, VIS1);
3888 tcg_gen_helper_0_0(helper_movl_DT0_0);
3889 gen_op_store_DT0_fpr(DFPREG(rd));
3890 break;
3891 case 0x061: /* VIS I fzeros */
3892 CHECK_FPU_FEATURE(dc, VIS1);
3893 tcg_gen_helper_0_0(helper_movl_FT0_0);
3894 gen_op_store_FT0_fpr(rd);
3895 break;
3896 case 0x062: /* VIS I fnor */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_op_load_fpr_DT0(DFPREG(rs1));
3899 gen_op_load_fpr_DT1(DFPREG(rs2));
3900 tcg_gen_helper_0_0(helper_fnor);
3901 gen_op_store_DT0_fpr(DFPREG(rd));
3902 break;
3903 case 0x063: /* VIS I fnors */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 gen_op_load_fpr_FT0(rs1);
3906 gen_op_load_fpr_FT1(rs2);
3907 tcg_gen_helper_0_0(helper_fnors);
3908 gen_op_store_FT0_fpr(rd);
3909 break;
3910 case 0x064: /* VIS I fandnot2 */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 gen_op_load_fpr_DT1(DFPREG(rs1));
3913 gen_op_load_fpr_DT0(DFPREG(rs2));
3914 tcg_gen_helper_0_0(helper_fandnot);
3915 gen_op_store_DT0_fpr(DFPREG(rd));
3916 break;
3917 case 0x065: /* VIS I fandnot2s */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 gen_op_load_fpr_FT1(rs1);
3920 gen_op_load_fpr_FT0(rs2);
3921 tcg_gen_helper_0_0(helper_fandnots);
3922 gen_op_store_FT0_fpr(rd);
3923 break;
3924 case 0x066: /* VIS I fnot2 */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 gen_op_load_fpr_DT1(DFPREG(rs2));
3927 tcg_gen_helper_0_0(helper_fnot);
3928 gen_op_store_DT0_fpr(DFPREG(rd));
3929 break;
3930 case 0x067: /* VIS I fnot2s */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 gen_op_load_fpr_FT1(rs2);
3933 tcg_gen_helper_0_0(helper_fnot);
3934 gen_op_store_FT0_fpr(rd);
3935 break;
3936 case 0x068: /* VIS I fandnot1 */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 gen_op_load_fpr_DT0(DFPREG(rs1));
3939 gen_op_load_fpr_DT1(DFPREG(rs2));
3940 tcg_gen_helper_0_0(helper_fandnot);
3941 gen_op_store_DT0_fpr(DFPREG(rd));
3942 break;
3943 case 0x069: /* VIS I fandnot1s */
3944 CHECK_FPU_FEATURE(dc, VIS1);
3945 gen_op_load_fpr_FT0(rs1);
3946 gen_op_load_fpr_FT1(rs2);
3947 tcg_gen_helper_0_0(helper_fandnots);
3948 gen_op_store_FT0_fpr(rd);
3949 break;
3950 case 0x06a: /* VIS I fnot1 */
3951 CHECK_FPU_FEATURE(dc, VIS1);
3952 gen_op_load_fpr_DT1(DFPREG(rs1));
3953 tcg_gen_helper_0_0(helper_fnot);
3954 gen_op_store_DT0_fpr(DFPREG(rd));
3955 break;
3956 case 0x06b: /* VIS I fnot1s */
3957 CHECK_FPU_FEATURE(dc, VIS1);
3958 gen_op_load_fpr_FT1(rs1);
3959 tcg_gen_helper_0_0(helper_fnot);
3960 gen_op_store_FT0_fpr(rd);
3961 break;
3962 case 0x06c: /* VIS I fxor */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 gen_op_load_fpr_DT0(DFPREG(rs1));
3965 gen_op_load_fpr_DT1(DFPREG(rs2));
3966 tcg_gen_helper_0_0(helper_fxor);
3967 gen_op_store_DT0_fpr(DFPREG(rd));
3968 break;
3969 case 0x06d: /* VIS I fxors */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 gen_op_load_fpr_FT0(rs1);
3972 gen_op_load_fpr_FT1(rs2);
3973 tcg_gen_helper_0_0(helper_fxors);
3974 gen_op_store_FT0_fpr(rd);
3975 break;
3976 case 0x06e: /* VIS I fnand */
3977 CHECK_FPU_FEATURE(dc, VIS1);
3978 gen_op_load_fpr_DT0(DFPREG(rs1));
3979 gen_op_load_fpr_DT1(DFPREG(rs2));
3980 tcg_gen_helper_0_0(helper_fnand);
3981 gen_op_store_DT0_fpr(DFPREG(rd));
3982 break;
3983 case 0x06f: /* VIS I fnands */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 gen_op_load_fpr_FT0(rs1);
3986 gen_op_load_fpr_FT1(rs2);
3987 tcg_gen_helper_0_0(helper_fnands);
3988 gen_op_store_FT0_fpr(rd);
3989 break;
3990 case 0x070: /* VIS I fand */
3991 CHECK_FPU_FEATURE(dc, VIS1);
3992 gen_op_load_fpr_DT0(DFPREG(rs1));
3993 gen_op_load_fpr_DT1(DFPREG(rs2));
3994 tcg_gen_helper_0_0(helper_fand);
3995 gen_op_store_DT0_fpr(DFPREG(rd));
3996 break;
3997 case 0x071: /* VIS I fands */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 gen_op_load_fpr_FT0(rs1);
4000 gen_op_load_fpr_FT1(rs2);
4001 tcg_gen_helper_0_0(helper_fands);
4002 gen_op_store_FT0_fpr(rd);
4003 break;
4004 case 0x072: /* VIS I fxnor */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 gen_op_load_fpr_DT0(DFPREG(rs1));
4007 gen_op_load_fpr_DT1(DFPREG(rs2));
4008 tcg_gen_helper_0_0(helper_fxnor);
4009 gen_op_store_DT0_fpr(DFPREG(rd));
4010 break;
4011 case 0x073: /* VIS I fxnors */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 gen_op_load_fpr_FT0(rs1);
4014 gen_op_load_fpr_FT1(rs2);
4015 tcg_gen_helper_0_0(helper_fxnors);
4016 gen_op_store_FT0_fpr(rd);
4017 break;
4018 case 0x074: /* VIS I fsrc1 */
4019 CHECK_FPU_FEATURE(dc, VIS1);
4020 gen_op_load_fpr_DT0(DFPREG(rs1));
4021 gen_op_store_DT0_fpr(DFPREG(rd));
4022 break;
4023 case 0x075: /* VIS I fsrc1s */
4024 CHECK_FPU_FEATURE(dc, VIS1);
4025 gen_op_load_fpr_FT0(rs1);
4026 gen_op_store_FT0_fpr(rd);
4027 break;
4028 case 0x076: /* VIS I fornot2 */
4029 CHECK_FPU_FEATURE(dc, VIS1);
4030 gen_op_load_fpr_DT1(DFPREG(rs1));
4031 gen_op_load_fpr_DT0(DFPREG(rs2));
4032 tcg_gen_helper_0_0(helper_fornot);
4033 gen_op_store_DT0_fpr(DFPREG(rd));
4034 break;
4035 case 0x077: /* VIS I fornot2s */
4036 CHECK_FPU_FEATURE(dc, VIS1);
4037 gen_op_load_fpr_FT1(rs1);
4038 gen_op_load_fpr_FT0(rs2);
4039 tcg_gen_helper_0_0(helper_fornots);
4040 gen_op_store_FT0_fpr(rd);
4041 break;
4042 case 0x078: /* VIS I fsrc2 */
4043 CHECK_FPU_FEATURE(dc, VIS1);
4044 gen_op_load_fpr_DT0(DFPREG(rs2));
4045 gen_op_store_DT0_fpr(DFPREG(rd));
4046 break;
4047 case 0x079: /* VIS I fsrc2s */
4048 CHECK_FPU_FEATURE(dc, VIS1);
4049 gen_op_load_fpr_FT0(rs2);
4050 gen_op_store_FT0_fpr(rd);
4051 break;
4052 case 0x07a: /* VIS I fornot1 */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 gen_op_load_fpr_DT0(DFPREG(rs1));
4055 gen_op_load_fpr_DT1(DFPREG(rs2));
4056 tcg_gen_helper_0_0(helper_fornot);
4057 gen_op_store_DT0_fpr(DFPREG(rd));
4058 break;
4059 case 0x07b: /* VIS I fornot1s */
4060 CHECK_FPU_FEATURE(dc, VIS1);
4061 gen_op_load_fpr_FT0(rs1);
4062 gen_op_load_fpr_FT1(rs2);
4063 tcg_gen_helper_0_0(helper_fornots);
4064 gen_op_store_FT0_fpr(rd);
4065 break;
4066 case 0x07c: /* VIS I for */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 gen_op_load_fpr_DT0(DFPREG(rs1));
4069 gen_op_load_fpr_DT1(DFPREG(rs2));
4070 tcg_gen_helper_0_0(helper_for);
4071 gen_op_store_DT0_fpr(DFPREG(rd));
4072 break;
4073 case 0x07d: /* VIS I fors */
4074 CHECK_FPU_FEATURE(dc, VIS1);
4075 gen_op_load_fpr_FT0(rs1);
4076 gen_op_load_fpr_FT1(rs2);
4077 tcg_gen_helper_0_0(helper_fors);
4078 gen_op_store_FT0_fpr(rd);
4079 break;
4080 case 0x07e: /* VIS I fone */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 tcg_gen_helper_0_0(helper_movl_DT0_1);
4083 gen_op_store_DT0_fpr(DFPREG(rd));
4084 break;
4085 case 0x07f: /* VIS I fones */
4086 CHECK_FPU_FEATURE(dc, VIS1);
4087 tcg_gen_helper_0_0(helper_movl_FT0_1);
4088 gen_op_store_FT0_fpr(rd);
4089 break;
4090 case 0x080: /* VIS I shutdown */
4091 case 0x081: /* VIS II siam */
4092 // XXX
4093 goto illegal_insn;
4094 default:
4095 goto illegal_insn;
4097 #else
4098 goto ncp_insn;
4099 #endif
4100 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4101 #ifdef TARGET_SPARC64
4102 goto illegal_insn;
4103 #else
4104 goto ncp_insn;
4105 #endif
4106 #ifdef TARGET_SPARC64
4107 } else if (xop == 0x39) { /* V9 return */
4108 TCGv r_const;
4110 save_state(dc, cpu_cond);
4111 cpu_src1 = get_src1(insn, cpu_src1);
4112 if (IS_IMM) { /* immediate */
4113 rs2 = GET_FIELDs(insn, 19, 31);
4114 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4115 } else { /* register */
4116 rs2 = GET_FIELD(insn, 27, 31);
4117 if (rs2) {
4118 gen_movl_reg_TN(rs2, cpu_src2);
4119 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4120 } else
4121 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4123 tcg_gen_helper_0_0(helper_restore);
4124 gen_mov_pc_npc(dc, cpu_cond);
4125 r_const = tcg_const_i32(3);
4126 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4127 tcg_temp_free(r_const);
4128 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4129 dc->npc = DYNAMIC_PC;
4130 goto jmp_insn;
4131 #endif
4132 } else {
4133 cpu_src1 = get_src1(insn, cpu_src1);
4134 if (IS_IMM) { /* immediate */
4135 rs2 = GET_FIELDs(insn, 19, 31);
4136 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4137 } else { /* register */
4138 rs2 = GET_FIELD(insn, 27, 31);
4139 if (rs2) {
4140 gen_movl_reg_TN(rs2, cpu_src2);
4141 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4142 } else
4143 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4145 switch (xop) {
4146 case 0x38: /* jmpl */
4148 TCGv r_const;
4150 r_const = tcg_const_tl(dc->pc);
4151 gen_movl_TN_reg(rd, r_const);
4152 tcg_temp_free(r_const);
4153 gen_mov_pc_npc(dc, cpu_cond);
4154 r_const = tcg_const_i32(3);
4155 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4156 r_const);
4157 tcg_temp_free(r_const);
4158 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4159 dc->npc = DYNAMIC_PC;
4161 goto jmp_insn;
4162 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4163 case 0x39: /* rett, V9 return */
4165 TCGv r_const;
4167 if (!supervisor(dc))
4168 goto priv_insn;
4169 gen_mov_pc_npc(dc, cpu_cond);
4170 r_const = tcg_const_i32(3);
4171 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4172 r_const);
4173 tcg_temp_free(r_const);
4174 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4175 dc->npc = DYNAMIC_PC;
4176 tcg_gen_helper_0_0(helper_rett);
4178 goto jmp_insn;
4179 #endif
4180 case 0x3b: /* flush */
4181 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4182 goto unimp_flush;
4183 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4184 break;
4185 case 0x3c: /* save */
4186 save_state(dc, cpu_cond);
4187 tcg_gen_helper_0_0(helper_save);
4188 gen_movl_TN_reg(rd, cpu_dst);
4189 break;
4190 case 0x3d: /* restore */
4191 save_state(dc, cpu_cond);
4192 tcg_gen_helper_0_0(helper_restore);
4193 gen_movl_TN_reg(rd, cpu_dst);
4194 break;
4195 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4196 case 0x3e: /* V9 done/retry */
4198 switch (rd) {
4199 case 0:
4200 if (!supervisor(dc))
4201 goto priv_insn;
4202 dc->npc = DYNAMIC_PC;
4203 dc->pc = DYNAMIC_PC;
4204 tcg_gen_helper_0_0(helper_done);
4205 goto jmp_insn;
4206 case 1:
4207 if (!supervisor(dc))
4208 goto priv_insn;
4209 dc->npc = DYNAMIC_PC;
4210 dc->pc = DYNAMIC_PC;
4211 tcg_gen_helper_0_0(helper_retry);
4212 goto jmp_insn;
4213 default:
4214 goto illegal_insn;
4217 break;
4218 #endif
4219 default:
4220 goto illegal_insn;
4223 break;
4225 break;
4226 case 3: /* load/store instructions */
4228 unsigned int xop = GET_FIELD(insn, 7, 12);
4230 cpu_src1 = get_src1(insn, cpu_src1);
4231 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4232 rs2 = GET_FIELD(insn, 27, 31);
4233 gen_movl_reg_TN(rs2, cpu_src2);
4234 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4235 } else if (IS_IMM) { /* immediate */
4236 rs2 = GET_FIELDs(insn, 19, 31);
4237 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4238 } else { /* register */
4239 rs2 = GET_FIELD(insn, 27, 31);
4240 if (rs2 != 0) {
4241 gen_movl_reg_TN(rs2, cpu_src2);
4242 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4243 } else
4244 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4246 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4247 (xop > 0x17 && xop <= 0x1d ) ||
4248 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4249 switch (xop) {
4250 case 0x0: /* load unsigned word */
4251 gen_address_mask(dc, cpu_addr);
4252 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4253 break;
4254 case 0x1: /* load unsigned byte */
4255 gen_address_mask(dc, cpu_addr);
4256 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4257 break;
4258 case 0x2: /* load unsigned halfword */
4259 gen_address_mask(dc, cpu_addr);
4260 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4261 break;
4262 case 0x3: /* load double word */
4263 if (rd & 1)
4264 goto illegal_insn;
4265 else {
4266 TCGv r_const;
4268 save_state(dc, cpu_cond);
4269 r_const = tcg_const_i32(7);
4270 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4271 r_const); // XXX remove
4272 tcg_temp_free(r_const);
4273 gen_address_mask(dc, cpu_addr);
4274 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4275 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4276 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4277 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4278 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4279 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4280 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4282 break;
4283 case 0x9: /* load signed byte */
4284 gen_address_mask(dc, cpu_addr);
4285 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4286 break;
4287 case 0xa: /* load signed halfword */
4288 gen_address_mask(dc, cpu_addr);
4289 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4290 break;
4291 case 0xd: /* ldstub -- XXX: should be atomically */
4293 TCGv r_const;
4295 gen_address_mask(dc, cpu_addr);
4296 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4297 r_const = tcg_const_tl(0xff);
4298 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4299 tcg_temp_free(r_const);
4301 break;
4302 case 0x0f: /* swap register with memory. Also
4303 atomically */
4304 CHECK_IU_FEATURE(dc, SWAP);
4305 gen_movl_reg_TN(rd, cpu_val);
4306 gen_address_mask(dc, cpu_addr);
4307 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4308 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4309 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4310 break;
4311 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4312 case 0x10: /* load word alternate */
4313 #ifndef TARGET_SPARC64
4314 if (IS_IMM)
4315 goto illegal_insn;
4316 if (!supervisor(dc))
4317 goto priv_insn;
4318 #endif
4319 save_state(dc, cpu_cond);
4320 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4321 break;
4322 case 0x11: /* load unsigned byte alternate */
4323 #ifndef TARGET_SPARC64
4324 if (IS_IMM)
4325 goto illegal_insn;
4326 if (!supervisor(dc))
4327 goto priv_insn;
4328 #endif
4329 save_state(dc, cpu_cond);
4330 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4331 break;
4332 case 0x12: /* load unsigned halfword alternate */
4333 #ifndef TARGET_SPARC64
4334 if (IS_IMM)
4335 goto illegal_insn;
4336 if (!supervisor(dc))
4337 goto priv_insn;
4338 #endif
4339 save_state(dc, cpu_cond);
4340 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4341 break;
4342 case 0x13: /* load double word alternate */
4343 #ifndef TARGET_SPARC64
4344 if (IS_IMM)
4345 goto illegal_insn;
4346 if (!supervisor(dc))
4347 goto priv_insn;
4348 #endif
4349 if (rd & 1)
4350 goto illegal_insn;
4351 save_state(dc, cpu_cond);
4352 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4353 goto skip_move;
4354 case 0x19: /* load signed byte alternate */
4355 #ifndef TARGET_SPARC64
4356 if (IS_IMM)
4357 goto illegal_insn;
4358 if (!supervisor(dc))
4359 goto priv_insn;
4360 #endif
4361 save_state(dc, cpu_cond);
4362 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4363 break;
4364 case 0x1a: /* load signed halfword alternate */
4365 #ifndef TARGET_SPARC64
4366 if (IS_IMM)
4367 goto illegal_insn;
4368 if (!supervisor(dc))
4369 goto priv_insn;
4370 #endif
4371 save_state(dc, cpu_cond);
4372 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4373 break;
4374 case 0x1d: /* ldstuba -- XXX: should be atomically */
4375 #ifndef TARGET_SPARC64
4376 if (IS_IMM)
4377 goto illegal_insn;
4378 if (!supervisor(dc))
4379 goto priv_insn;
4380 #endif
4381 save_state(dc, cpu_cond);
4382 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4383 break;
4384 case 0x1f: /* swap reg with alt. memory. Also
4385 atomically */
4386 CHECK_IU_FEATURE(dc, SWAP);
4387 #ifndef TARGET_SPARC64
4388 if (IS_IMM)
4389 goto illegal_insn;
4390 if (!supervisor(dc))
4391 goto priv_insn;
4392 #endif
4393 save_state(dc, cpu_cond);
4394 gen_movl_reg_TN(rd, cpu_val);
4395 gen_swap_asi(cpu_val, cpu_addr, insn);
4396 break;
4398 #ifndef TARGET_SPARC64
4399 case 0x30: /* ldc */
4400 case 0x31: /* ldcsr */
4401 case 0x33: /* lddc */
4402 goto ncp_insn;
4403 #endif
4404 #endif
4405 #ifdef TARGET_SPARC64
4406 case 0x08: /* V9 ldsw */
4407 gen_address_mask(dc, cpu_addr);
4408 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4409 break;
4410 case 0x0b: /* V9 ldx */
4411 gen_address_mask(dc, cpu_addr);
4412 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4413 break;
4414 case 0x18: /* V9 ldswa */
4415 save_state(dc, cpu_cond);
4416 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4417 break;
4418 case 0x1b: /* V9 ldxa */
4419 save_state(dc, cpu_cond);
4420 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4421 break;
4422 case 0x2d: /* V9 prefetch, no effect */
4423 goto skip_move;
4424 case 0x30: /* V9 ldfa */
4425 save_state(dc, cpu_cond);
4426 gen_ldf_asi(cpu_addr, insn, 4, rd);
4427 goto skip_move;
4428 case 0x33: /* V9 lddfa */
4429 save_state(dc, cpu_cond);
4430 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4431 goto skip_move;
4432 case 0x3d: /* V9 prefetcha, no effect */
4433 goto skip_move;
4434 case 0x32: /* V9 ldqfa */
4435 CHECK_FPU_FEATURE(dc, FLOAT128);
4436 save_state(dc, cpu_cond);
4437 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4438 goto skip_move;
4439 #endif
4440 default:
4441 goto illegal_insn;
4443 gen_movl_TN_reg(rd, cpu_val);
4444 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4445 skip_move: ;
4446 #endif
4447 } else if (xop >= 0x20 && xop < 0x24) {
4448 if (gen_trap_ifnofpu(dc, cpu_cond))
4449 goto jmp_insn;
4450 save_state(dc, cpu_cond);
4451 switch (xop) {
4452 case 0x20: /* load fpreg */
4453 gen_address_mask(dc, cpu_addr);
4454 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4455 break;
4456 case 0x21: /* ldfsr, V9 ldxfsr */
4457 #ifdef TARGET_SPARC64
4458 gen_address_mask(dc, cpu_addr);
4459 if (rd == 1) {
4460 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4461 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4462 } else
4463 #else
4465 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4466 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4468 #endif
4469 break;
4470 case 0x22: /* load quad fpreg */
4472 TCGv r_const;
4474 CHECK_FPU_FEATURE(dc, FLOAT128);
4475 r_const = tcg_const_i32(dc->mem_idx);
4476 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4477 tcg_temp_free(r_const);
4478 gen_op_store_QT0_fpr(QFPREG(rd));
4480 break;
4481 case 0x23: /* load double fpreg */
4483 TCGv r_const;
4485 r_const = tcg_const_i32(dc->mem_idx);
4486 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4487 tcg_temp_free(r_const);
4488 gen_op_store_DT0_fpr(DFPREG(rd));
4490 break;
4491 default:
4492 goto illegal_insn;
4494 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4495 xop == 0xe || xop == 0x1e) {
4496 gen_movl_reg_TN(rd, cpu_val);
4497 switch (xop) {
4498 case 0x4: /* store word */
4499 gen_address_mask(dc, cpu_addr);
4500 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4501 break;
4502 case 0x5: /* store byte */
4503 gen_address_mask(dc, cpu_addr);
4504 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4505 break;
4506 case 0x6: /* store halfword */
4507 gen_address_mask(dc, cpu_addr);
4508 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4509 break;
4510 case 0x7: /* store double word */
4511 if (rd & 1)
4512 goto illegal_insn;
4513 else {
4514 TCGv r_low, r_const;
4516 save_state(dc, cpu_cond);
4517 gen_address_mask(dc, cpu_addr);
4518 r_const = tcg_const_i32(7);
4519 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4520 r_const); // XXX remove
4521 tcg_temp_free(r_const);
4522 r_low = tcg_temp_new(TCG_TYPE_TL);
4523 gen_movl_reg_TN(rd + 1, r_low);
4524 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4525 r_low);
4526 tcg_temp_free(r_low);
4527 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4529 break;
4530 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4531 case 0x14: /* store word alternate */
4532 #ifndef TARGET_SPARC64
4533 if (IS_IMM)
4534 goto illegal_insn;
4535 if (!supervisor(dc))
4536 goto priv_insn;
4537 #endif
4538 save_state(dc, cpu_cond);
4539 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4540 break;
4541 case 0x15: /* store byte alternate */
4542 #ifndef TARGET_SPARC64
4543 if (IS_IMM)
4544 goto illegal_insn;
4545 if (!supervisor(dc))
4546 goto priv_insn;
4547 #endif
4548 save_state(dc, cpu_cond);
4549 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4550 break;
4551 case 0x16: /* store halfword alternate */
4552 #ifndef TARGET_SPARC64
4553 if (IS_IMM)
4554 goto illegal_insn;
4555 if (!supervisor(dc))
4556 goto priv_insn;
4557 #endif
4558 save_state(dc, cpu_cond);
4559 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4560 break;
4561 case 0x17: /* store double word alternate */
4562 #ifndef TARGET_SPARC64
4563 if (IS_IMM)
4564 goto illegal_insn;
4565 if (!supervisor(dc))
4566 goto priv_insn;
4567 #endif
4568 if (rd & 1)
4569 goto illegal_insn;
4570 else {
4571 save_state(dc, cpu_cond);
4572 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4574 break;
4575 #endif
4576 #ifdef TARGET_SPARC64
4577 case 0x0e: /* V9 stx */
4578 gen_address_mask(dc, cpu_addr);
4579 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4580 break;
4581 case 0x1e: /* V9 stxa */
4582 save_state(dc, cpu_cond);
4583 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4584 break;
4585 #endif
4586 default:
4587 goto illegal_insn;
4589 } else if (xop > 0x23 && xop < 0x28) {
4590 if (gen_trap_ifnofpu(dc, cpu_cond))
4591 goto jmp_insn;
4592 save_state(dc, cpu_cond);
4593 switch (xop) {
4594 case 0x24: /* store fpreg */
4595 gen_address_mask(dc, cpu_addr);
4596 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4597 break;
4598 case 0x25: /* stfsr, V9 stxfsr */
4599 #ifdef TARGET_SPARC64
4600 gen_address_mask(dc, cpu_addr);
4601 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4602 if (rd == 1)
4603 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4604 else {
4605 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4606 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4608 #else
4609 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4610 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4611 #endif
4612 break;
4613 case 0x26:
4614 #ifdef TARGET_SPARC64
4615 /* V9 stqf, store quad fpreg */
4617 TCGv r_const;
4619 CHECK_FPU_FEATURE(dc, FLOAT128);
4620 gen_op_load_fpr_QT0(QFPREG(rd));
4621 r_const = tcg_const_i32(dc->mem_idx);
4622 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4623 tcg_temp_free(r_const);
4625 break;
4626 #else /* !TARGET_SPARC64 */
4627 /* stdfq, store floating point queue */
4628 #if defined(CONFIG_USER_ONLY)
4629 goto illegal_insn;
4630 #else
4631 if (!supervisor(dc))
4632 goto priv_insn;
4633 if (gen_trap_ifnofpu(dc, cpu_cond))
4634 goto jmp_insn;
4635 goto nfq_insn;
4636 #endif
4637 #endif
4638 case 0x27: /* store double fpreg */
4640 TCGv r_const;
4642 gen_op_load_fpr_DT0(DFPREG(rd));
4643 r_const = tcg_const_i32(dc->mem_idx);
4644 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4645 tcg_temp_free(r_const);
4647 break;
4648 default:
4649 goto illegal_insn;
4651 } else if (xop > 0x33 && xop < 0x3f) {
4652 save_state(dc, cpu_cond);
4653 switch (xop) {
4654 #ifdef TARGET_SPARC64
4655 case 0x34: /* V9 stfa */
4656 gen_stf_asi(cpu_addr, insn, 4, rd);
4657 break;
4658 case 0x36: /* V9 stqfa */
4660 TCGv r_const;
4662 CHECK_FPU_FEATURE(dc, FLOAT128);
4663 r_const = tcg_const_i32(7);
4664 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4665 r_const);
4666 tcg_temp_free(r_const);
4667 gen_op_load_fpr_QT0(QFPREG(rd));
4668 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4670 break;
4671 case 0x37: /* V9 stdfa */
4672 gen_op_load_fpr_DT0(DFPREG(rd));
4673 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4674 break;
4675 case 0x3c: /* V9 casa */
4676 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4677 gen_movl_TN_reg(rd, cpu_val);
4678 break;
4679 case 0x3e: /* V9 casxa */
4680 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4681 gen_movl_TN_reg(rd, cpu_val);
4682 break;
4683 #else
4684 case 0x34: /* stc */
4685 case 0x35: /* stcsr */
4686 case 0x36: /* stdcq */
4687 case 0x37: /* stdc */
4688 goto ncp_insn;
4689 #endif
4690 default:
4691 goto illegal_insn;
4694 else
4695 goto illegal_insn;
4697 break;
4699 /* default case for non jump instructions */
4700 if (dc->npc == DYNAMIC_PC) {
4701 dc->pc = DYNAMIC_PC;
4702 gen_op_next_insn();
4703 } else if (dc->npc == JUMP_PC) {
4704 /* we can do a static jump */
4705 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4706 dc->is_br = 1;
4707 } else {
4708 dc->pc = dc->npc;
4709 dc->npc = dc->npc + 4;
4711 jmp_insn:
4712 return;
4713 illegal_insn:
4715 TCGv r_const;
4717 save_state(dc, cpu_cond);
4718 r_const = tcg_const_i32(TT_ILL_INSN);
4719 tcg_gen_helper_0_1(raise_exception, r_const);
4720 tcg_temp_free(r_const);
4721 dc->is_br = 1;
4723 return;
4724 unimp_flush:
4726 TCGv r_const;
4728 save_state(dc, cpu_cond);
4729 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4730 tcg_gen_helper_0_1(raise_exception, r_const);
4731 tcg_temp_free(r_const);
4732 dc->is_br = 1;
4734 return;
4735 #if !defined(CONFIG_USER_ONLY)
4736 priv_insn:
4738 TCGv r_const;
4740 save_state(dc, cpu_cond);
4741 r_const = tcg_const_i32(TT_PRIV_INSN);
4742 tcg_gen_helper_0_1(raise_exception, r_const);
4743 tcg_temp_free(r_const);
4744 dc->is_br = 1;
4746 return;
4747 #endif
4748 nfpu_insn:
4749 save_state(dc, cpu_cond);
4750 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4751 dc->is_br = 1;
4752 return;
4753 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4754 nfq_insn:
4755 save_state(dc, cpu_cond);
4756 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4757 dc->is_br = 1;
4758 return;
4759 #endif
4760 #ifndef TARGET_SPARC64
4761 ncp_insn:
4763 TCGv r_const;
4765 save_state(dc, cpu_cond);
4766 r_const = tcg_const_i32(TT_NCP_INSN);
4767 tcg_gen_helper_0_1(raise_exception, r_const);
4768 tcg_temp_free(r_const);
4769 dc->is_br = 1;
4771 return;
4772 #endif
4775 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4776 int spc, CPUSPARCState *env)
4778 target_ulong pc_start, last_pc;
4779 uint16_t *gen_opc_end;
4780 DisasContext dc1, *dc = &dc1;
4781 int j, lj = -1;
4782 int num_insns;
4783 int max_insns;
4785 memset(dc, 0, sizeof(DisasContext));
4786 dc->tb = tb;
4787 pc_start = tb->pc;
4788 dc->pc = pc_start;
4789 last_pc = dc->pc;
4790 dc->npc = (target_ulong) tb->cs_base;
4791 dc->mem_idx = cpu_mmu_index(env);
4792 dc->def = env->def;
4793 if ((dc->def->features & CPU_FEATURE_FLOAT))
4794 dc->fpu_enabled = cpu_fpu_enabled(env);
4795 else
4796 dc->fpu_enabled = 0;
4797 #ifdef TARGET_SPARC64
4798 dc->address_mask_32bit = env->pstate & PS_AM;
4799 #endif
4800 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4802 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4803 cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
4804 cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
4806 cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
4808 // loads and stores
4809 cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
4810 cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
4812 num_insns = 0;
4813 max_insns = tb->cflags & CF_COUNT_MASK;
4814 if (max_insns == 0)
4815 max_insns = CF_COUNT_MASK;
4816 gen_icount_start();
4817 do {
4818 if (env->nb_breakpoints > 0) {
4819 for(j = 0; j < env->nb_breakpoints; j++) {
4820 if (env->breakpoints[j] == dc->pc) {
4821 if (dc->pc != pc_start)
4822 save_state(dc, cpu_cond);
4823 tcg_gen_helper_0_0(helper_debug);
4824 tcg_gen_exit_tb(0);
4825 dc->is_br = 1;
4826 goto exit_gen_loop;
4830 if (spc) {
4831 if (loglevel > 0)
4832 fprintf(logfile, "Search PC...\n");
4833 j = gen_opc_ptr - gen_opc_buf;
4834 if (lj < j) {
4835 lj++;
4836 while (lj < j)
4837 gen_opc_instr_start[lj++] = 0;
4838 gen_opc_pc[lj] = dc->pc;
4839 gen_opc_npc[lj] = dc->npc;
4840 gen_opc_instr_start[lj] = 1;
4841 gen_opc_icount[lj] = num_insns;
4844 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4845 gen_io_start();
4846 last_pc = dc->pc;
4847 disas_sparc_insn(dc);
4848 num_insns++;
4850 if (dc->is_br)
4851 break;
4852 /* if the next PC is different, we abort now */
4853 if (dc->pc != (last_pc + 4))
4854 break;
4855 /* if we reach a page boundary, we stop generation so that the
4856 PC of a TT_TFAULT exception is always in the right page */
4857 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4858 break;
4859 /* if single step mode, we generate only one instruction and
4860 generate an exception */
4861 if (env->singlestep_enabled) {
4862 tcg_gen_movi_tl(cpu_pc, dc->pc);
4863 tcg_gen_exit_tb(0);
4864 break;
4866 } while ((gen_opc_ptr < gen_opc_end) &&
4867 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4868 num_insns < max_insns);
4870 exit_gen_loop:
4871 tcg_temp_free(cpu_addr);
4872 tcg_temp_free(cpu_val);
4873 tcg_temp_free(cpu_dst);
4874 tcg_temp_free(cpu_tmp64);
4875 tcg_temp_free(cpu_tmp32);
4876 tcg_temp_free(cpu_tmp0);
4877 if (tb->cflags & CF_LAST_IO)
4878 gen_io_end();
4879 if (!dc->is_br) {
4880 if (dc->pc != DYNAMIC_PC &&
4881 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4882 /* static PC and NPC: we can use direct chaining */
4883 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4884 } else {
4885 if (dc->pc != DYNAMIC_PC)
4886 tcg_gen_movi_tl(cpu_pc, dc->pc);
4887 save_npc(dc, cpu_cond);
4888 tcg_gen_exit_tb(0);
4891 gen_icount_end(tb, num_insns);
4892 *gen_opc_ptr = INDEX_op_end;
4893 if (spc) {
4894 j = gen_opc_ptr - gen_opc_buf;
4895 lj++;
4896 while (lj <= j)
4897 gen_opc_instr_start[lj++] = 0;
4898 #if 0
4899 if (loglevel > 0) {
4900 page_dump(logfile);
4902 #endif
4903 gen_opc_jump_pc[0] = dc->jump_pc[0];
4904 gen_opc_jump_pc[1] = dc->jump_pc[1];
4905 } else {
4906 tb->size = last_pc + 4 - pc_start;
4907 tb->icount = num_insns;
4909 #ifdef DEBUG_DISAS
4910 if (loglevel & CPU_LOG_TB_IN_ASM) {
4911 fprintf(logfile, "--------------\n");
4912 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4913 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4914 fprintf(logfile, "\n");
4916 #endif
4919 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4921 gen_intermediate_code_internal(tb, 0, env);
4924 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4926 gen_intermediate_code_internal(tb, 1, env);
4929 void gen_intermediate_code_init(CPUSPARCState *env)
4931 unsigned int i;
4932 static int inited;
4933 static const char * const gregnames[8] = {
4934 NULL, // g0 not used
4935 "g1",
4936 "g2",
4937 "g3",
4938 "g4",
4939 "g5",
4940 "g6",
4941 "g7",
4943 static const char * const fregnames[64] = {
4944 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4945 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4946 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4947 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4948 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4949 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4950 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4951 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4954 /* init various static tables */
4955 if (!inited) {
4956 inited = 1;
4958 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4959 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4960 offsetof(CPUState, regwptr),
4961 "regwptr");
4962 #ifdef TARGET_SPARC64
4963 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4964 TCG_AREG0, offsetof(CPUState, xcc),
4965 "xcc");
4966 cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
4967 TCG_AREG0, offsetof(CPUState, asi),
4968 "asi");
4969 cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
4970 TCG_AREG0, offsetof(CPUState, fprs),
4971 "fprs");
4972 cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
4973 TCG_AREG0, offsetof(CPUState, gsr),
4974 "gsr");
4975 cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4976 TCG_AREG0,
4977 offsetof(CPUState, tick_cmpr),
4978 "tick_cmpr");
4979 cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4980 TCG_AREG0,
4981 offsetof(CPUState, stick_cmpr),
4982 "stick_cmpr");
4983 cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4984 TCG_AREG0,
4985 offsetof(CPUState, hstick_cmpr),
4986 "hstick_cmpr");
4987 cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4988 offsetof(CPUState, hintp),
4989 "hintp");
4990 cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4991 offsetof(CPUState, htba),
4992 "htba");
4993 cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4994 offsetof(CPUState, hver),
4995 "hver");
4996 cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4997 offsetof(CPUState, ssr), "ssr");
4998 cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4999 offsetof(CPUState, version), "ver");
5000 #else
5001 cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
5002 TCG_AREG0, offsetof(CPUState, wim),
5003 "wim");
5004 #endif
5005 cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
5006 TCG_AREG0, offsetof(CPUState, cond),
5007 "cond");
5008 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
5009 TCG_AREG0, offsetof(CPUState, cc_src),
5010 "cc_src");
5011 cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
5012 offsetof(CPUState, cc_src2),
5013 "cc_src2");
5014 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
5015 TCG_AREG0, offsetof(CPUState, cc_dst),
5016 "cc_dst");
5017 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
5018 TCG_AREG0, offsetof(CPUState, psr),
5019 "psr");
5020 cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
5021 TCG_AREG0, offsetof(CPUState, fsr),
5022 "fsr");
5023 cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
5024 TCG_AREG0, offsetof(CPUState, pc),
5025 "pc");
5026 cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
5027 TCG_AREG0, offsetof(CPUState, npc),
5028 "npc");
5029 cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
5030 TCG_AREG0, offsetof(CPUState, y), "y");
5031 #ifndef CONFIG_USER_ONLY
5032 cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
5033 TCG_AREG0, offsetof(CPUState, tbr),
5034 "tbr");
5035 #endif
5036 for (i = 1; i < 8; i++)
5037 cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
5038 offsetof(CPUState, gregs[i]),
5039 gregnames[i]);
5040 for (i = 0; i < TARGET_FPREGS; i++)
5041 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
5042 offsetof(CPUState, fpr[i]),
5043 fregnames[i]);
5045 /* register helpers */
5047 #undef DEF_HELPER
5048 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5049 #include "helper.h"
5053 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5054 unsigned long searched_pc, int pc_pos, void *puc)
5056 target_ulong npc;
5057 env->pc = gen_opc_pc[pc_pos];
5058 npc = gen_opc_npc[pc_pos];
5059 if (npc == 1) {
5060 /* dynamic NPC: already stored */
5061 } else if (npc == 2) {
5062 target_ulong t2 = (target_ulong)(unsigned long)puc;
5063 /* jump PC: use T2 and the jump targets of the translation */
5064 if (t2)
5065 env->npc = gen_opc_jump_pc[0];
5066 else
5067 env->npc = gen_opc_jump_pc[1];
5068 } else {
5069 env->npc = npc;