Use the new concat_i32_i64 op for std and stda
[qemu/mini2440.git] / target-sparc / translate.c
blobbe00054ae25e4230ad5e65754d23300f792c905e
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define DEBUG_DISAS
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
61 #include "gen-icount.h"
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
67 int is_br;
68 int mem_idx;
69 int fpu_enabled;
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
72 sparc_def_t *def;
73 } DisasContext;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
86 #ifdef TARGET_SPARC64
87 #define FFPREG(r) (r)
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
90 #else
91 #define FFPREG(r) (r)
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
94 #endif
/* Sign-extend the low 'len' bits of 'x' (1 <= len <= 32).
   The previous implementation used (x << len) >> len on a signed int,
   which is undefined behaviour when the shifted value overflows and
   implementation-defined for the right shift of a negative value.
   This version uses only unsigned arithmetic. */
static int sign_extend(int x, int len)
{
    unsigned int val = (unsigned int)x;
    unsigned int sign_bit = 1U << (len - 1);

    /* keep only the low 'len' bits (mask is all-ones when len == 32) */
    val &= (sign_bit << 1) - 1;
    /* xor/subtract idiom: propagates the sign bit into the high bits */
    return (int)((val ^ sign_bit) - sign_bit);
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src)
107 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
108 offsetof(CPU_DoubleU, l.upper));
109 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
110 offsetof(CPU_DoubleU, l.lower));
113 static void gen_op_load_fpr_DT1(unsigned int src)
115 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
116 offsetof(CPU_DoubleU, l.upper));
117 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
118 offsetof(CPU_DoubleU, l.lower));
121 static void gen_op_store_DT0_fpr(unsigned int dst)
123 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
124 offsetof(CPU_DoubleU, l.upper));
125 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
126 offsetof(CPU_DoubleU, l.lower));
129 static void gen_op_load_fpr_QT0(unsigned int src)
131 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
132 offsetof(CPU_QuadU, l.upmost));
133 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
134 offsetof(CPU_QuadU, l.upper));
135 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
136 offsetof(CPU_QuadU, l.lower));
137 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
138 offsetof(CPU_QuadU, l.lowest));
141 static void gen_op_load_fpr_QT1(unsigned int src)
143 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
144 offsetof(CPU_QuadU, l.upmost));
145 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
146 offsetof(CPU_QuadU, l.upper));
147 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
148 offsetof(CPU_QuadU, l.lower));
149 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
150 offsetof(CPU_QuadU, l.lowest));
153 static void gen_op_store_QT0_fpr(unsigned int dst)
155 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
156 offsetof(CPU_QuadU, l.upmost));
157 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
158 offsetof(CPU_QuadU, l.upper));
159 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
160 offsetof(CPU_QuadU, l.lower));
161 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
162 offsetof(CPU_QuadU, l.lowest));
165 /* moves */
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
170 #endif
171 #else
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
175 #else
176 #endif
177 #endif
179 #ifdef TARGET_SPARC64
180 #ifndef TARGET_ABI32
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
182 #else
183 #define AM_CHECK(dc) (1)
184 #endif
185 #endif
187 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
189 #ifdef TARGET_SPARC64
190 if (AM_CHECK(dc))
191 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
192 #endif
195 static inline void gen_movl_reg_TN(int reg, TCGv tn)
197 if (reg == 0)
198 tcg_gen_movi_tl(tn, 0);
199 else if (reg < 8)
200 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
201 else {
202 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
206 static inline void gen_movl_TN_reg(int reg, TCGv tn)
208 if (reg == 0)
209 return;
210 else if (reg < 8)
211 tcg_gen_mov_tl(cpu_gregs[reg], tn);
212 else {
213 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217 static inline void gen_goto_tb(DisasContext *s, int tb_num,
218 target_ulong pc, target_ulong npc)
220 TranslationBlock *tb;
222 tb = s->tb;
223 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
224 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num);
227 tcg_gen_movi_tl(cpu_pc, pc);
228 tcg_gen_movi_tl(cpu_npc, npc);
229 tcg_gen_exit_tb((long)tb + tb_num);
230 } else {
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc, pc);
233 tcg_gen_movi_tl(cpu_npc, npc);
234 tcg_gen_exit_tb(0);
238 // XXX suboptimal
239 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
241 tcg_gen_extu_i32_tl(reg, src);
242 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
243 tcg_gen_andi_tl(reg, reg, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
248 tcg_gen_extu_i32_tl(reg, src);
249 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
250 tcg_gen_andi_tl(reg, reg, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
255 tcg_gen_extu_i32_tl(reg, src);
256 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
257 tcg_gen_andi_tl(reg, reg, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
262 tcg_gen_extu_i32_tl(reg, src);
263 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
264 tcg_gen_andi_tl(reg, reg, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr, 0);
272 #ifdef TARGET_SPARC64
273 static inline void gen_cc_clear_xcc(void)
275 tcg_gen_movi_i32(cpu_xcc, 0);
277 #endif
279 /* old op:
280 if (!T0)
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
283 env->psr |= PSR_NEG;
285 static inline void gen_cc_NZ_icc(TCGv dst)
287 TCGv r_temp;
288 int l1, l2;
290 l1 = gen_new_label();
291 l2 = gen_new_label();
292 r_temp = tcg_temp_new(TCG_TYPE_TL);
293 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
294 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
295 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
296 gen_set_label(l1);
297 tcg_gen_ext_i32_tl(r_temp, dst);
298 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
299 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
300 gen_set_label(l2);
301 tcg_temp_free(r_temp);
304 #ifdef TARGET_SPARC64
305 static inline void gen_cc_NZ_xcc(TCGv dst)
307 int l1, l2;
309 l1 = gen_new_label();
310 l2 = gen_new_label();
311 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
312 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
313 gen_set_label(l1);
314 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
315 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
316 gen_set_label(l2);
318 #endif
320 /* old op:
321 if (T0 < src1)
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
326 TCGv r_temp1, r_temp2;
327 int l1;
329 l1 = gen_new_label();
330 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
331 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
332 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
333 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
334 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
335 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
336 gen_set_label(l1);
337 tcg_temp_free(r_temp1);
338 tcg_temp_free(r_temp2);
341 #ifdef TARGET_SPARC64
342 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
344 int l1;
346 l1 = gen_new_label();
347 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
348 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
349 gen_set_label(l1);
351 #endif
353 /* old op:
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
355 env->psr |= PSR_OVF;
357 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
359 TCGv r_temp;
361 r_temp = tcg_temp_new(TCG_TYPE_TL);
362 tcg_gen_xor_tl(r_temp, src1, src2);
363 tcg_gen_xori_tl(r_temp, r_temp, -1);
364 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
365 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
366 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
368 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
369 tcg_temp_free(r_temp);
370 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
373 #ifdef TARGET_SPARC64
374 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
376 TCGv r_temp;
378 r_temp = tcg_temp_new(TCG_TYPE_TL);
379 tcg_gen_xor_tl(r_temp, src1, src2);
380 tcg_gen_xori_tl(r_temp, r_temp, -1);
381 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
382 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
383 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
384 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
385 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
386 tcg_temp_free(r_temp);
387 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
389 #endif
391 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
393 TCGv r_temp, r_const;
394 int l1;
396 l1 = gen_new_label();
398 r_temp = tcg_temp_new(TCG_TYPE_TL);
399 tcg_gen_xor_tl(r_temp, src1, src2);
400 tcg_gen_xori_tl(r_temp, r_temp, -1);
401 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
402 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
403 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
405 r_const = tcg_const_i32(TT_TOVF);
406 tcg_gen_helper_0_1(raise_exception, r_const);
407 tcg_temp_free(r_const);
408 gen_set_label(l1);
409 tcg_temp_free(r_temp);
412 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
414 int l1;
416 l1 = gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0, src1, src2);
418 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
420 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
421 gen_set_label(l1);
424 static inline void gen_tag_tv(TCGv src1, TCGv src2)
426 int l1;
427 TCGv r_const;
429 l1 = gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0, src1, src2);
431 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
433 r_const = tcg_const_i32(TT_TOVF);
434 tcg_gen_helper_0_1(raise_exception, r_const);
435 tcg_temp_free(r_const);
436 gen_set_label(l1);
439 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
441 tcg_gen_mov_tl(cpu_cc_src, src1);
442 tcg_gen_mov_tl(cpu_cc_src2, src2);
443 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
444 gen_cc_clear_icc();
445 gen_cc_NZ_icc(cpu_cc_dst);
446 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
447 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 #ifdef TARGET_SPARC64
449 gen_cc_clear_xcc();
450 gen_cc_NZ_xcc(cpu_cc_dst);
451 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
452 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
453 #endif
454 tcg_gen_mov_tl(dst, cpu_cc_dst);
457 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
459 tcg_gen_mov_tl(cpu_cc_src, src1);
460 tcg_gen_mov_tl(cpu_cc_src2, src2);
461 gen_mov_reg_C(cpu_tmp0, cpu_psr);
462 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
463 gen_cc_clear_icc();
464 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
465 #ifdef TARGET_SPARC64
466 gen_cc_clear_xcc();
467 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
468 #endif
469 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
470 gen_cc_NZ_icc(cpu_cc_dst);
471 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
472 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst);
475 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
476 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477 #endif
478 tcg_gen_mov_tl(dst, cpu_cc_dst);
481 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
483 tcg_gen_mov_tl(cpu_cc_src, src1);
484 tcg_gen_mov_tl(cpu_cc_src2, src2);
485 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
486 gen_cc_clear_icc();
487 gen_cc_NZ_icc(cpu_cc_dst);
488 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
489 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
490 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
491 #ifdef TARGET_SPARC64
492 gen_cc_clear_xcc();
493 gen_cc_NZ_xcc(cpu_cc_dst);
494 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
495 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
496 #endif
497 tcg_gen_mov_tl(dst, cpu_cc_dst);
500 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
502 tcg_gen_mov_tl(cpu_cc_src, src1);
503 tcg_gen_mov_tl(cpu_cc_src2, src2);
504 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
505 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
506 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
507 gen_cc_clear_icc();
508 gen_cc_NZ_icc(cpu_cc_dst);
509 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
510 #ifdef TARGET_SPARC64
511 gen_cc_clear_xcc();
512 gen_cc_NZ_xcc(cpu_cc_dst);
513 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
514 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
515 #endif
516 tcg_gen_mov_tl(dst, cpu_cc_dst);
519 /* old op:
520 if (src1 < T1)
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
525 TCGv r_temp1, r_temp2;
526 int l1;
528 l1 = gen_new_label();
529 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
530 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
531 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
532 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
533 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
534 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
535 gen_set_label(l1);
536 tcg_temp_free(r_temp1);
537 tcg_temp_free(r_temp2);
540 #ifdef TARGET_SPARC64
541 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
543 int l1;
545 l1 = gen_new_label();
546 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
547 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
548 gen_set_label(l1);
550 #endif
552 /* old op:
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
554 env->psr |= PSR_OVF;
556 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
558 TCGv r_temp;
560 r_temp = tcg_temp_new(TCG_TYPE_TL);
561 tcg_gen_xor_tl(r_temp, src1, src2);
562 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
563 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
564 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
566 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
567 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
568 tcg_temp_free(r_temp);
571 #ifdef TARGET_SPARC64
572 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
574 TCGv r_temp;
576 r_temp = tcg_temp_new(TCG_TYPE_TL);
577 tcg_gen_xor_tl(r_temp, src1, src2);
578 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
579 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
580 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
581 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
582 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
583 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
584 tcg_temp_free(r_temp);
586 #endif
588 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
590 TCGv r_temp, r_const;
591 int l1;
593 l1 = gen_new_label();
595 r_temp = tcg_temp_new(TCG_TYPE_TL);
596 tcg_gen_xor_tl(r_temp, src1, src2);
597 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
598 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
599 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
601 r_const = tcg_const_i32(TT_TOVF);
602 tcg_gen_helper_0_1(raise_exception, r_const);
603 tcg_temp_free(r_const);
604 gen_set_label(l1);
605 tcg_temp_free(r_temp);
608 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
610 tcg_gen_mov_tl(cpu_cc_src, src1);
611 tcg_gen_mov_tl(cpu_cc_src2, src2);
612 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
613 gen_cc_clear_icc();
614 gen_cc_NZ_icc(cpu_cc_dst);
615 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
616 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
617 #ifdef TARGET_SPARC64
618 gen_cc_clear_xcc();
619 gen_cc_NZ_xcc(cpu_cc_dst);
620 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
621 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
622 #endif
623 tcg_gen_mov_tl(dst, cpu_cc_dst);
626 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
628 tcg_gen_mov_tl(cpu_cc_src, src1);
629 tcg_gen_mov_tl(cpu_cc_src2, src2);
630 gen_mov_reg_C(cpu_tmp0, cpu_psr);
631 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
632 gen_cc_clear_icc();
633 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
634 #ifdef TARGET_SPARC64
635 gen_cc_clear_xcc();
636 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
637 #endif
638 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
639 gen_cc_NZ_icc(cpu_cc_dst);
640 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
641 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst);
644 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
645 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
646 #endif
647 tcg_gen_mov_tl(dst, cpu_cc_dst);
650 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
652 tcg_gen_mov_tl(cpu_cc_src, src1);
653 tcg_gen_mov_tl(cpu_cc_src2, src2);
654 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
655 gen_cc_clear_icc();
656 gen_cc_NZ_icc(cpu_cc_dst);
657 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
658 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
660 #ifdef TARGET_SPARC64
661 gen_cc_clear_xcc();
662 gen_cc_NZ_xcc(cpu_cc_dst);
663 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
664 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
665 #endif
666 tcg_gen_mov_tl(dst, cpu_cc_dst);
669 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
671 tcg_gen_mov_tl(cpu_cc_src, src1);
672 tcg_gen_mov_tl(cpu_cc_src2, src2);
673 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
674 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
675 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
676 gen_cc_clear_icc();
677 gen_cc_NZ_icc(cpu_cc_dst);
678 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
679 #ifdef TARGET_SPARC64
680 gen_cc_clear_xcc();
681 gen_cc_NZ_xcc(cpu_cc_dst);
682 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
683 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
684 #endif
685 tcg_gen_mov_tl(dst, cpu_cc_dst);
688 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
690 TCGv r_temp;
691 int l1;
693 l1 = gen_new_label();
694 r_temp = tcg_temp_new(TCG_TYPE_TL);
696 /* old op:
697 if (!(env->y & 1))
698 T1 = 0;
700 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
701 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
704 tcg_gen_movi_tl(cpu_cc_src2, 0);
705 gen_set_label(l1);
707 // b2 = T0 & 1;
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
710 tcg_gen_shli_tl(r_temp, r_temp, 31);
711 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
712 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
714 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
716 // b1 = N ^ V;
717 gen_mov_reg_N(cpu_tmp0, cpu_psr);
718 gen_mov_reg_V(r_temp, cpu_psr);
719 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
720 tcg_temp_free(r_temp);
722 // T0 = (b1 << 31) | (T0 >> 1);
723 // src1 = T0;
724 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
725 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
726 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
731 gen_cc_clear_icc();
732 gen_cc_NZ_icc(cpu_cc_dst);
733 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
734 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
735 tcg_gen_mov_tl(dst, cpu_cc_dst);
738 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
740 TCGv r_temp, r_temp2;
742 r_temp = tcg_temp_new(TCG_TYPE_I64);
743 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
745 tcg_gen_extu_i32_i64(r_temp, src2);
746 tcg_gen_extu_i32_i64(r_temp2, src1);
747 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
749 tcg_gen_shri_i64(r_temp, r_temp2, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
751 tcg_temp_free(r_temp);
752 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst, r_temp2);
755 #else
756 tcg_gen_trunc_i64_tl(dst, r_temp2);
757 #endif
758 tcg_temp_free(r_temp2);
761 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
763 TCGv r_temp, r_temp2;
765 r_temp = tcg_temp_new(TCG_TYPE_I64);
766 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
768 tcg_gen_ext_i32_i64(r_temp, src2);
769 tcg_gen_ext_i32_i64(r_temp2, src1);
770 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
772 tcg_gen_shri_i64(r_temp, r_temp2, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
774 tcg_temp_free(r_temp);
775 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst, r_temp2);
778 #else
779 tcg_gen_trunc_i64_tl(dst, r_temp2);
780 #endif
781 tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Raise a TT_DIV_ZERO trap if 'divisor' is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv r_const;
    int lab_ok;

    lab_ok = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, lab_ok);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(lab_ok);
}

/* SDIVX: 64-bit signed division, trapping on a zero divisor and pinning
   the single overflowing case INT64_MIN / -1 to INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int lab_divide, lab_done;

    lab_divide = gen_new_label();
    lab_done = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, lab_divide);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, lab_divide);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(lab_done);
    gen_set_label(lab_divide);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(lab_done);
}
#endif
817 static inline void gen_op_div_cc(TCGv dst)
819 int l1;
821 tcg_gen_mov_tl(cpu_cc_dst, dst);
822 gen_cc_clear_icc();
823 gen_cc_NZ_icc(cpu_cc_dst);
824 l1 = gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
826 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
827 gen_set_label(l1);
830 static inline void gen_op_logic_cc(TCGv dst)
832 tcg_gen_mov_tl(cpu_cc_dst, dst);
834 gen_cc_clear_icc();
835 gen_cc_NZ_icc(cpu_cc_dst);
836 #ifdef TARGET_SPARC64
837 gen_cc_clear_xcc();
838 gen_cc_NZ_xcc(cpu_cc_dst);
839 #endif
842 // 1
843 static inline void gen_op_eval_ba(TCGv dst)
845 tcg_gen_movi_tl(dst, 1);
848 // Z
849 static inline void gen_op_eval_be(TCGv dst, TCGv src)
851 gen_mov_reg_Z(dst, src);
854 // Z | (N ^ V)
855 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
857 gen_mov_reg_N(cpu_tmp0, src);
858 gen_mov_reg_V(dst, src);
859 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
860 gen_mov_reg_Z(cpu_tmp0, src);
861 tcg_gen_or_tl(dst, dst, cpu_tmp0);
864 // N ^ V
865 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
867 gen_mov_reg_V(cpu_tmp0, src);
868 gen_mov_reg_N(dst, src);
869 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
872 // C | Z
873 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
875 gen_mov_reg_Z(cpu_tmp0, src);
876 gen_mov_reg_C(dst, src);
877 tcg_gen_or_tl(dst, dst, cpu_tmp0);
880 // C
881 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
883 gen_mov_reg_C(dst, src);
886 // V
887 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
889 gen_mov_reg_V(dst, src);
892 // 0
893 static inline void gen_op_eval_bn(TCGv dst)
895 tcg_gen_movi_tl(dst, 0);
898 // N
899 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
901 gen_mov_reg_N(dst, src);
904 // !Z
905 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
907 gen_mov_reg_Z(dst, src);
908 tcg_gen_xori_tl(dst, dst, 0x1);
911 // !(Z | (N ^ V))
912 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
914 gen_mov_reg_N(cpu_tmp0, src);
915 gen_mov_reg_V(dst, src);
916 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
917 gen_mov_reg_Z(cpu_tmp0, src);
918 tcg_gen_or_tl(dst, dst, cpu_tmp0);
919 tcg_gen_xori_tl(dst, dst, 0x1);
922 // !(N ^ V)
923 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
925 gen_mov_reg_V(cpu_tmp0, src);
926 gen_mov_reg_N(dst, src);
927 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
928 tcg_gen_xori_tl(dst, dst, 0x1);
931 // !(C | Z)
932 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
934 gen_mov_reg_Z(cpu_tmp0, src);
935 gen_mov_reg_C(dst, src);
936 tcg_gen_or_tl(dst, dst, cpu_tmp0);
937 tcg_gen_xori_tl(dst, dst, 0x1);
940 // !C
941 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
943 gen_mov_reg_C(dst, src);
944 tcg_gen_xori_tl(dst, dst, 0x1);
947 // !N
948 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
950 gen_mov_reg_N(dst, src);
951 tcg_gen_xori_tl(dst, dst, 0x1);
954 // !V
955 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
957 gen_mov_reg_V(dst, src);
958 tcg_gen_xori_tl(dst, dst, 0x1);
962 FPSR bit field FCC1 | FCC0:
966 3 unordered
968 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
969 unsigned int fcc_offset)
971 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
972 tcg_gen_andi_tl(reg, reg, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
976 unsigned int fcc_offset)
978 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
979 tcg_gen_andi_tl(reg, reg, 0x1);
982 // !0: FCC0 | FCC1
983 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
984 unsigned int fcc_offset)
986 gen_mov_reg_FCC0(dst, src, fcc_offset);
987 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
988 tcg_gen_or_tl(dst, dst, cpu_tmp0);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
993 unsigned int fcc_offset)
995 gen_mov_reg_FCC0(dst, src, fcc_offset);
996 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
997 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1000 // 1 or 3: FCC0
1001 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1002 unsigned int fcc_offset)
1004 gen_mov_reg_FCC0(dst, src, fcc_offset);
1007 // 1: FCC0 & !FCC1
1008 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1009 unsigned int fcc_offset)
1011 gen_mov_reg_FCC0(dst, src, fcc_offset);
1012 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1013 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1014 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017 // 2 or 3: FCC1
1018 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1019 unsigned int fcc_offset)
1021 gen_mov_reg_FCC1(dst, src, fcc_offset);
1024 // 2: !FCC0 & FCC1
1025 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1026 unsigned int fcc_offset)
1028 gen_mov_reg_FCC0(dst, src, fcc_offset);
1029 tcg_gen_xori_tl(dst, dst, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1031 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1034 // 3: FCC0 & FCC1
1035 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1036 unsigned int fcc_offset)
1038 gen_mov_reg_FCC0(dst, src, fcc_offset);
1039 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1040 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1045 unsigned int fcc_offset)
1047 gen_mov_reg_FCC0(dst, src, fcc_offset);
1048 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1049 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1050 tcg_gen_xori_tl(dst, dst, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1055 unsigned int fcc_offset)
1057 gen_mov_reg_FCC0(dst, src, fcc_offset);
1058 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1059 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1060 tcg_gen_xori_tl(dst, dst, 0x1);
1063 // 0 or 2: !FCC0
1064 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1065 unsigned int fcc_offset)
1067 gen_mov_reg_FCC0(dst, src, fcc_offset);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1073 unsigned int fcc_offset)
1075 gen_mov_reg_FCC0(dst, src, fcc_offset);
1076 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1077 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1078 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1079 tcg_gen_xori_tl(dst, dst, 0x1);
1082 // 0 or 1: !FCC1
1083 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1084 unsigned int fcc_offset)
1086 gen_mov_reg_FCC1(dst, src, fcc_offset);
1087 tcg_gen_xori_tl(dst, dst, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1092 unsigned int fcc_offset)
1094 gen_mov_reg_FCC0(dst, src, fcc_offset);
1095 tcg_gen_xori_tl(dst, dst, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1097 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1098 tcg_gen_xori_tl(dst, dst, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1103 unsigned int fcc_offset)
1105 gen_mov_reg_FCC0(dst, src, fcc_offset);
1106 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1107 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1111 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1112 target_ulong pc2, TCGv r_cond)
1114 int l1;
1116 l1 = gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1120 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1122 gen_set_label(l1);
1123 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1126 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1127 target_ulong pc2, TCGv r_cond)
1129 int l1;
1131 l1 = gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1135 gen_goto_tb(dc, 0, pc2, pc1);
1137 gen_set_label(l1);
1138 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1141 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1142 TCGv r_cond)
1144 int l1, l2;
1146 l1 = gen_new_label();
1147 l2 = gen_new_label();
1149 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1151 tcg_gen_movi_tl(cpu_npc, npc1);
1152 tcg_gen_br(l2);
1154 gen_set_label(l1);
1155 tcg_gen_movi_tl(cpu_npc, npc2);
1156 gen_set_label(l2);
1159 /* call this function before using the condition register as it may
1160 have been set for a jump */
1161 static inline void flush_cond(DisasContext *dc, TCGv cond)
1163 if (dc->npc == JUMP_PC) {
1164 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1165 dc->npc = DYNAMIC_PC;
1169 static inline void save_npc(DisasContext *dc, TCGv cond)
1171 if (dc->npc == JUMP_PC) {
1172 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1173 dc->npc = DYNAMIC_PC;
1174 } else if (dc->npc != DYNAMIC_PC) {
1175 tcg_gen_movi_tl(cpu_npc, dc->npc);
1179 static inline void save_state(DisasContext *dc, TCGv cond)
1181 tcg_gen_movi_tl(cpu_pc, dc->pc);
1182 save_npc(dc, cond);
1185 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1187 if (dc->npc == JUMP_PC) {
1188 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1189 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1190 dc->pc = DYNAMIC_PC;
1191 } else if (dc->npc == DYNAMIC_PC) {
1192 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1193 dc->pc = DYNAMIC_PC;
1194 } else {
1195 dc->pc = dc->npc;
1199 static inline void gen_op_next_insn(void)
1201 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1202 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1205 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1207 TCGv r_src;
1209 #ifdef TARGET_SPARC64
1210 if (cc)
1211 r_src = cpu_xcc;
1212 else
1213 r_src = cpu_psr;
1214 #else
1215 r_src = cpu_psr;
1216 #endif
1217 switch (cond) {
1218 case 0x0:
1219 gen_op_eval_bn(r_dst);
1220 break;
1221 case 0x1:
1222 gen_op_eval_be(r_dst, r_src);
1223 break;
1224 case 0x2:
1225 gen_op_eval_ble(r_dst, r_src);
1226 break;
1227 case 0x3:
1228 gen_op_eval_bl(r_dst, r_src);
1229 break;
1230 case 0x4:
1231 gen_op_eval_bleu(r_dst, r_src);
1232 break;
1233 case 0x5:
1234 gen_op_eval_bcs(r_dst, r_src);
1235 break;
1236 case 0x6:
1237 gen_op_eval_bneg(r_dst, r_src);
1238 break;
1239 case 0x7:
1240 gen_op_eval_bvs(r_dst, r_src);
1241 break;
1242 case 0x8:
1243 gen_op_eval_ba(r_dst);
1244 break;
1245 case 0x9:
1246 gen_op_eval_bne(r_dst, r_src);
1247 break;
1248 case 0xa:
1249 gen_op_eval_bg(r_dst, r_src);
1250 break;
1251 case 0xb:
1252 gen_op_eval_bge(r_dst, r_src);
1253 break;
1254 case 0xc:
1255 gen_op_eval_bgu(r_dst, r_src);
1256 break;
1257 case 0xd:
1258 gen_op_eval_bcc(r_dst, r_src);
1259 break;
1260 case 0xe:
1261 gen_op_eval_bpos(r_dst, r_src);
1262 break;
1263 case 0xf:
1264 gen_op_eval_bvc(r_dst, r_src);
1265 break;
1269 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1271 unsigned int offset;
1273 switch (cc) {
1274 default:
1275 case 0x0:
1276 offset = 0;
1277 break;
1278 case 0x1:
1279 offset = 32 - 10;
1280 break;
1281 case 0x2:
1282 offset = 34 - 10;
1283 break;
1284 case 0x3:
1285 offset = 36 - 10;
1286 break;
1289 switch (cond) {
1290 case 0x0:
1291 gen_op_eval_bn(r_dst);
1292 break;
1293 case 0x1:
1294 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1295 break;
1296 case 0x2:
1297 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1298 break;
1299 case 0x3:
1300 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1301 break;
1302 case 0x4:
1303 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1304 break;
1305 case 0x5:
1306 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1307 break;
1308 case 0x6:
1309 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1310 break;
1311 case 0x7:
1312 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1313 break;
1314 case 0x8:
1315 gen_op_eval_ba(r_dst);
1316 break;
1317 case 0x9:
1318 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1319 break;
1320 case 0xa:
1321 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1322 break;
1323 case 0xb:
1324 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1325 break;
1326 case 0xc:
1327 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1328 break;
1329 case 0xd:
1330 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1331 break;
1332 case 0xe:
1333 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1334 break;
1335 case 0xf:
1336 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1337 break;
1341 #ifdef TARGET_SPARC64
1342 // Inverted logic
1343 static const int gen_tcg_cond_reg[8] = {
1345 TCG_COND_NE,
1346 TCG_COND_GT,
1347 TCG_COND_GE,
1349 TCG_COND_EQ,
1350 TCG_COND_LE,
1351 TCG_COND_LT,
1354 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1356 int l1;
1358 l1 = gen_new_label();
1359 tcg_gen_movi_tl(r_dst, 0);
1360 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1361 tcg_gen_movi_tl(r_dst, 1);
1362 gen_set_label(l1);
1364 #endif
1366 /* XXX: potentially incorrect if dynamic npc */
1367 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1368 TCGv r_cond)
1370 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1371 target_ulong target = dc->pc + offset;
1373 if (cond == 0x0) {
1374 /* unconditional not taken */
1375 if (a) {
1376 dc->pc = dc->npc + 4;
1377 dc->npc = dc->pc + 4;
1378 } else {
1379 dc->pc = dc->npc;
1380 dc->npc = dc->pc + 4;
1382 } else if (cond == 0x8) {
1383 /* unconditional taken */
1384 if (a) {
1385 dc->pc = target;
1386 dc->npc = dc->pc + 4;
1387 } else {
1388 dc->pc = dc->npc;
1389 dc->npc = target;
1391 } else {
1392 flush_cond(dc, r_cond);
1393 gen_cond(r_cond, cc, cond);
1394 if (a) {
1395 gen_branch_a(dc, target, dc->npc, r_cond);
1396 dc->is_br = 1;
1397 } else {
1398 dc->pc = dc->npc;
1399 dc->jump_pc[0] = target;
1400 dc->jump_pc[1] = dc->npc + 4;
1401 dc->npc = JUMP_PC;
1406 /* XXX: potentially incorrect if dynamic npc */
1407 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1408 TCGv r_cond)
1410 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1411 target_ulong target = dc->pc + offset;
1413 if (cond == 0x0) {
1414 /* unconditional not taken */
1415 if (a) {
1416 dc->pc = dc->npc + 4;
1417 dc->npc = dc->pc + 4;
1418 } else {
1419 dc->pc = dc->npc;
1420 dc->npc = dc->pc + 4;
1422 } else if (cond == 0x8) {
1423 /* unconditional taken */
1424 if (a) {
1425 dc->pc = target;
1426 dc->npc = dc->pc + 4;
1427 } else {
1428 dc->pc = dc->npc;
1429 dc->npc = target;
1431 } else {
1432 flush_cond(dc, r_cond);
1433 gen_fcond(r_cond, cc, cond);
1434 if (a) {
1435 gen_branch_a(dc, target, dc->npc, r_cond);
1436 dc->is_br = 1;
1437 } else {
1438 dc->pc = dc->npc;
1439 dc->jump_pc[0] = target;
1440 dc->jump_pc[1] = dc->npc + 4;
1441 dc->npc = JUMP_PC;
1446 #ifdef TARGET_SPARC64
1447 /* XXX: potentially incorrect if dynamic npc */
1448 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1449 TCGv r_cond, TCGv r_reg)
1451 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1452 target_ulong target = dc->pc + offset;
1454 flush_cond(dc, r_cond);
1455 gen_cond_reg(r_cond, cond, r_reg);
1456 if (a) {
1457 gen_branch_a(dc, target, dc->npc, r_cond);
1458 dc->is_br = 1;
1459 } else {
1460 dc->pc = dc->npc;
1461 dc->jump_pc[0] = target;
1462 dc->jump_pc[1] = dc->npc + 4;
1463 dc->npc = JUMP_PC;
1467 static GenOpFunc * const gen_fcmpd[4] = {
1468 helper_fcmpd,
1469 helper_fcmpd_fcc1,
1470 helper_fcmpd_fcc2,
1471 helper_fcmpd_fcc3,
1474 static GenOpFunc * const gen_fcmpq[4] = {
1475 helper_fcmpq,
1476 helper_fcmpq_fcc1,
1477 helper_fcmpq_fcc2,
1478 helper_fcmpq_fcc3,
1481 static GenOpFunc * const gen_fcmped[4] = {
1482 helper_fcmped,
1483 helper_fcmped_fcc1,
1484 helper_fcmped_fcc2,
1485 helper_fcmped_fcc3,
1488 static GenOpFunc * const gen_fcmpeq[4] = {
1489 helper_fcmpeq,
1490 helper_fcmpeq_fcc1,
1491 helper_fcmpeq_fcc2,
1492 helper_fcmpeq_fcc3,
1495 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1497 switch (fccno) {
1498 case 0:
1499 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1500 break;
1501 case 1:
1502 tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
1503 break;
1504 case 2:
1505 tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
1506 break;
1507 case 3:
1508 tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
1509 break;
1513 static inline void gen_op_fcmpd(int fccno)
1515 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
1518 static inline void gen_op_fcmpq(int fccno)
1520 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
1523 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1525 switch (fccno) {
1526 case 0:
1527 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1528 break;
1529 case 1:
1530 tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
1531 break;
1532 case 2:
1533 tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
1534 break;
1535 case 3:
1536 tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
1537 break;
1541 static inline void gen_op_fcmped(int fccno)
1543 tcg_gen_helper_0_0(gen_fcmped[fccno]);
1546 static inline void gen_op_fcmpeq(int fccno)
1548 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
1551 #else
1553 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1555 tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
1558 static inline void gen_op_fcmpd(int fccno)
1560 tcg_gen_helper_0_0(helper_fcmpd);
1563 static inline void gen_op_fcmpq(int fccno)
1565 tcg_gen_helper_0_0(helper_fcmpq);
1568 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1570 tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
1573 static inline void gen_op_fcmped(int fccno)
1575 tcg_gen_helper_0_0(helper_fcmped);
1578 static inline void gen_op_fcmpeq(int fccno)
1580 tcg_gen_helper_0_0(helper_fcmpeq);
1582 #endif
1584 static inline void gen_op_fpexception_im(int fsr_flags)
1586 TCGv r_const;
1588 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1589 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1590 r_const = tcg_const_i32(TT_FP_EXCP);
1591 tcg_gen_helper_0_1(raise_exception, r_const);
1592 tcg_temp_free(r_const);
1595 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1597 #if !defined(CONFIG_USER_ONLY)
1598 if (!dc->fpu_enabled) {
1599 TCGv r_const;
1601 save_state(dc, r_cond);
1602 r_const = tcg_const_i32(TT_NFPU_INSN);
1603 tcg_gen_helper_0_1(raise_exception, r_const);
1604 tcg_temp_free(r_const);
1605 dc->is_br = 1;
1606 return 1;
1608 #endif
1609 return 0;
1612 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1614 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1617 static inline void gen_clear_float_exceptions(void)
1619 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1622 /* asi moves */
1623 #ifdef TARGET_SPARC64
1624 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1626 int asi;
1627 TCGv r_asi;
1629 if (IS_IMM) {
1630 r_asi = tcg_temp_new(TCG_TYPE_I32);
1631 tcg_gen_mov_i32(r_asi, cpu_asi);
1632 } else {
1633 asi = GET_FIELD(insn, 19, 26);
1634 r_asi = tcg_const_i32(asi);
1636 return r_asi;
1639 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1640 int sign)
1642 TCGv r_asi, r_size, r_sign;
1644 r_asi = gen_get_asi(insn, addr);
1645 r_size = tcg_const_i32(size);
1646 r_sign = tcg_const_i32(sign);
1647 tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
1648 tcg_temp_free(r_sign);
1649 tcg_temp_free(r_size);
1650 tcg_temp_free(r_asi);
1653 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1655 TCGv r_asi, r_size;
1657 r_asi = gen_get_asi(insn, addr);
1658 r_size = tcg_const_i32(size);
1659 tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
1660 tcg_temp_free(r_size);
1661 tcg_temp_free(r_asi);
1664 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1666 TCGv r_asi, r_size, r_rd;
1668 r_asi = gen_get_asi(insn, addr);
1669 r_size = tcg_const_i32(size);
1670 r_rd = tcg_const_i32(rd);
1671 tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
1672 tcg_temp_free(r_rd);
1673 tcg_temp_free(r_size);
1674 tcg_temp_free(r_asi);
1677 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1679 TCGv r_asi, r_size, r_rd;
1681 r_asi = gen_get_asi(insn, addr);
1682 r_size = tcg_const_i32(size);
1683 r_rd = tcg_const_i32(rd);
1684 tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
1685 tcg_temp_free(r_rd);
1686 tcg_temp_free(r_size);
1687 tcg_temp_free(r_asi);
1690 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1692 TCGv r_asi, r_size, r_sign;
1694 r_asi = gen_get_asi(insn, addr);
1695 r_size = tcg_const_i32(4);
1696 r_sign = tcg_const_i32(0);
1697 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1698 tcg_temp_free(r_sign);
1699 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1700 tcg_temp_free(r_size);
1701 tcg_temp_free(r_asi);
1702 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1705 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1707 TCGv r_asi, r_rd;
1709 r_asi = gen_get_asi(insn, addr);
1710 r_rd = tcg_const_i32(rd);
1711 tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
1712 tcg_temp_free(r_rd);
1713 tcg_temp_free(r_asi);
1716 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1718 TCGv r_low, r_asi, r_size;
1720 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1721 r_low = tcg_temp_new(TCG_TYPE_I32);
1722 tcg_gen_trunc_tl_i32(r_low, cpu_tmp0);
1723 tcg_gen_trunc_tl_i32(cpu_tmp32, hi);
1724 tcg_gen_concat_i32_i64(cpu_tmp64, r_low, cpu_tmp32);
1725 tcg_temp_free(r_low);
1726 r_asi = gen_get_asi(insn, addr);
1727 r_size = tcg_const_i32(8);
1728 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1729 tcg_temp_free(r_size);
1730 tcg_temp_free(r_asi);
1733 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1734 int rd)
1736 TCGv r_val1, r_asi;
1738 r_val1 = tcg_temp_new(TCG_TYPE_TL);
1739 gen_movl_reg_TN(rd, r_val1);
1740 r_asi = gen_get_asi(insn, addr);
1741 tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
1742 tcg_temp_free(r_asi);
1743 tcg_temp_free(r_val1);
1746 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1747 int rd)
1749 TCGv r_asi;
1751 gen_movl_reg_TN(rd, cpu_tmp64);
1752 r_asi = gen_get_asi(insn, addr);
1753 tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
1754 tcg_temp_free(r_asi);
1757 #elif !defined(CONFIG_USER_ONLY)
1759 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1760 int sign)
1762 TCGv r_asi, r_size, r_sign;
1764 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1765 r_size = tcg_const_i32(size);
1766 r_sign = tcg_const_i32(sign);
1767 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1768 tcg_temp_free(r_sign);
1769 tcg_temp_free(r_size);
1770 tcg_temp_free(r_asi);
1771 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1774 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1776 TCGv r_asi, r_size;
1778 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1779 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1780 r_size = tcg_const_i32(size);
1781 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1782 tcg_temp_free(r_size);
1783 tcg_temp_free(r_asi);
1786 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1788 TCGv r_asi, r_size, r_sign;
1790 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1791 r_size = tcg_const_i32(4);
1792 r_sign = tcg_const_i32(0);
1793 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1794 tcg_temp_free(r_sign);
1795 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1796 tcg_temp_free(r_size);
1797 tcg_temp_free(r_asi);
1798 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1801 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1803 TCGv r_asi, r_size, r_sign;
1805 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1806 r_size = tcg_const_i32(8);
1807 r_sign = tcg_const_i32(0);
1808 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1809 tcg_temp_free(r_sign);
1810 tcg_temp_free(r_size);
1811 tcg_temp_free(r_asi);
1812 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1813 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1814 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1815 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1816 gen_movl_TN_reg(rd, hi);
1819 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1821 TCGv r_low, r_asi, r_size;
1823 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1824 r_low = tcg_temp_new(TCG_TYPE_I32);
1825 tcg_gen_trunc_tl_i32(r_low, cpu_tmp0);
1826 tcg_gen_trunc_tl_i32(cpu_tmp32, hi);
1827 tcg_gen_concat_i32_i64(cpu_tmp64, r_low, cpu_tmp32);
1828 tcg_temp_free(r_low);
1829 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1830 r_size = tcg_const_i32(8);
1831 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1832 tcg_temp_free(r_size);
1833 tcg_temp_free(r_asi);
1835 #endif
1837 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1838 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1840 TCGv r_val, r_asi, r_size;
1842 gen_ld_asi(dst, addr, insn, 1, 0);
1844 r_val = tcg_const_i64(0xffULL);
1845 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1846 r_size = tcg_const_i32(1);
1847 tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
1848 tcg_temp_free(r_size);
1849 tcg_temp_free(r_asi);
1850 tcg_temp_free(r_val);
1852 #endif
1854 static inline TCGv get_src1(unsigned int insn, TCGv def)
1856 TCGv r_rs1 = def;
1857 unsigned int rs1;
1859 rs1 = GET_FIELD(insn, 13, 17);
1860 if (rs1 == 0)
1861 r_rs1 = tcg_const_tl(0); // XXX how to free?
1862 else if (rs1 < 8)
1863 r_rs1 = cpu_gregs[rs1];
1864 else
1865 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1866 return r_rs1;
1869 static inline TCGv get_src2(unsigned int insn, TCGv def)
1871 TCGv r_rs2 = def;
1872 unsigned int rs2;
1874 if (IS_IMM) { /* immediate */
1875 rs2 = GET_FIELDs(insn, 19, 31);
1876 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1877 } else { /* register */
1878 rs2 = GET_FIELD(insn, 27, 31);
1879 if (rs2 == 0)
1880 r_rs2 = tcg_const_tl(0); // XXX how to free?
1881 else if (rs2 < 8)
1882 r_rs2 = cpu_gregs[rs2];
1883 else
1884 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1886 return r_rs2;
/* Bail out to illegal_insn unless the CPU model implements FEATURE. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Bail out to nfpu_insn unless the FPU implements FEATURE. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1896 /* before an instruction, dc->pc must be static */
1897 static void disas_sparc_insn(DisasContext * dc)
1899 unsigned int insn, opc, rs1, rs2, rd;
1901 if (unlikely(loglevel & CPU_LOG_TB_OP))
1902 tcg_gen_debug_insn_start(dc->pc);
1903 insn = ldl_code(dc->pc);
1904 opc = GET_FIELD(insn, 0, 1);
1906 rd = GET_FIELD(insn, 2, 6);
1908 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1909 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1911 switch (opc) {
1912 case 0: /* branches/sethi */
1914 unsigned int xop = GET_FIELD(insn, 7, 9);
1915 int32_t target;
1916 switch (xop) {
1917 #ifdef TARGET_SPARC64
1918 case 0x1: /* V9 BPcc */
1920 int cc;
1922 target = GET_FIELD_SP(insn, 0, 18);
1923 target = sign_extend(target, 18);
1924 target <<= 2;
1925 cc = GET_FIELD_SP(insn, 20, 21);
1926 if (cc == 0)
1927 do_branch(dc, target, insn, 0, cpu_cond);
1928 else if (cc == 2)
1929 do_branch(dc, target, insn, 1, cpu_cond);
1930 else
1931 goto illegal_insn;
1932 goto jmp_insn;
1934 case 0x3: /* V9 BPr */
1936 target = GET_FIELD_SP(insn, 0, 13) |
1937 (GET_FIELD_SP(insn, 20, 21) << 14);
1938 target = sign_extend(target, 16);
1939 target <<= 2;
1940 cpu_src1 = get_src1(insn, cpu_src1);
1941 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1942 goto jmp_insn;
1944 case 0x5: /* V9 FBPcc */
1946 int cc = GET_FIELD_SP(insn, 20, 21);
1947 if (gen_trap_ifnofpu(dc, cpu_cond))
1948 goto jmp_insn;
1949 target = GET_FIELD_SP(insn, 0, 18);
1950 target = sign_extend(target, 19);
1951 target <<= 2;
1952 do_fbranch(dc, target, insn, cc, cpu_cond);
1953 goto jmp_insn;
1955 #else
1956 case 0x7: /* CBN+x */
1958 goto ncp_insn;
1960 #endif
1961 case 0x2: /* BN+x */
1963 target = GET_FIELD(insn, 10, 31);
1964 target = sign_extend(target, 22);
1965 target <<= 2;
1966 do_branch(dc, target, insn, 0, cpu_cond);
1967 goto jmp_insn;
1969 case 0x6: /* FBN+x */
1971 if (gen_trap_ifnofpu(dc, cpu_cond))
1972 goto jmp_insn;
1973 target = GET_FIELD(insn, 10, 31);
1974 target = sign_extend(target, 22);
1975 target <<= 2;
1976 do_fbranch(dc, target, insn, 0, cpu_cond);
1977 goto jmp_insn;
1979 case 0x4: /* SETHI */
1980 if (rd) { // nop
1981 uint32_t value = GET_FIELD(insn, 10, 31);
1982 TCGv r_const;
1984 r_const = tcg_const_tl(value << 10);
1985 gen_movl_TN_reg(rd, r_const);
1986 tcg_temp_free(r_const);
1988 break;
1989 case 0x0: /* UNIMPL */
1990 default:
1991 goto illegal_insn;
1993 break;
1995 break;
1996 case 1:
1997 /*CALL*/ {
1998 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1999 TCGv r_const;
2001 r_const = tcg_const_tl(dc->pc);
2002 gen_movl_TN_reg(15, r_const);
2003 tcg_temp_free(r_const);
2004 target += dc->pc;
2005 gen_mov_pc_npc(dc, cpu_cond);
2006 dc->npc = target;
2008 goto jmp_insn;
2009 case 2: /* FPU & Logical Operations */
2011 unsigned int xop = GET_FIELD(insn, 7, 12);
2012 if (xop == 0x3a) { /* generate trap */
2013 int cond;
2015 cpu_src1 = get_src1(insn, cpu_src1);
2016 if (IS_IMM) {
2017 rs2 = GET_FIELD(insn, 25, 31);
2018 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2019 } else {
2020 rs2 = GET_FIELD(insn, 27, 31);
2021 if (rs2 != 0) {
2022 gen_movl_reg_TN(rs2, cpu_src2);
2023 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2024 } else
2025 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2027 cond = GET_FIELD(insn, 3, 6);
2028 if (cond == 0x8) {
2029 save_state(dc, cpu_cond);
2030 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2031 } else if (cond != 0) {
2032 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2033 #ifdef TARGET_SPARC64
2034 /* V9 icc/xcc */
2035 int cc = GET_FIELD_SP(insn, 11, 12);
2037 save_state(dc, cpu_cond);
2038 if (cc == 0)
2039 gen_cond(r_cond, 0, cond);
2040 else if (cc == 2)
2041 gen_cond(r_cond, 1, cond);
2042 else
2043 goto illegal_insn;
2044 #else
2045 save_state(dc, cpu_cond);
2046 gen_cond(r_cond, 0, cond);
2047 #endif
2048 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2049 tcg_temp_free(r_cond);
2051 gen_op_next_insn();
2052 tcg_gen_exit_tb(0);
2053 dc->is_br = 1;
2054 goto jmp_insn;
2055 } else if (xop == 0x28) {
2056 rs1 = GET_FIELD(insn, 13, 17);
2057 switch(rs1) {
2058 case 0: /* rdy */
2059 #ifndef TARGET_SPARC64
2060 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2061 manual, rdy on the microSPARC
2062 II */
2063 case 0x0f: /* stbar in the SPARCv8 manual,
2064 rdy on the microSPARC II */
2065 case 0x10 ... 0x1f: /* implementation-dependent in the
2066 SPARCv8 manual, rdy on the
2067 microSPARC II */
2068 #endif
2069 gen_movl_TN_reg(rd, cpu_y);
2070 break;
2071 #ifdef TARGET_SPARC64
2072 case 0x2: /* V9 rdccr */
2073 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2074 gen_movl_TN_reg(rd, cpu_dst);
2075 break;
2076 case 0x3: /* V9 rdasi */
2077 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2078 gen_movl_TN_reg(rd, cpu_dst);
2079 break;
2080 case 0x4: /* V9 rdtick */
2082 TCGv r_tickptr;
2084 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2085 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2086 offsetof(CPUState, tick));
2087 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2088 r_tickptr);
2089 tcg_temp_free(r_tickptr);
2090 gen_movl_TN_reg(rd, cpu_dst);
2092 break;
2093 case 0x5: /* V9 rdpc */
2095 TCGv r_const;
2097 r_const = tcg_const_tl(dc->pc);
2098 gen_movl_TN_reg(rd, r_const);
2099 tcg_temp_free(r_const);
2101 break;
2102 case 0x6: /* V9 rdfprs */
2103 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2104 gen_movl_TN_reg(rd, cpu_dst);
2105 break;
2106 case 0xf: /* V9 membar */
2107 break; /* no effect */
2108 case 0x13: /* Graphics Status */
2109 if (gen_trap_ifnofpu(dc, cpu_cond))
2110 goto jmp_insn;
2111 gen_movl_TN_reg(rd, cpu_gsr);
2112 break;
2113 case 0x17: /* Tick compare */
2114 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2115 break;
2116 case 0x18: /* System tick */
2118 TCGv r_tickptr;
2120 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2121 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2122 offsetof(CPUState, stick));
2123 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2124 r_tickptr);
2125 tcg_temp_free(r_tickptr);
2126 gen_movl_TN_reg(rd, cpu_dst);
2128 break;
2129 case 0x19: /* System tick compare */
2130 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2131 break;
2132 case 0x10: /* Performance Control */
2133 case 0x11: /* Performance Instrumentation Counter */
2134 case 0x12: /* Dispatch Control */
2135 case 0x14: /* Softint set, WO */
2136 case 0x15: /* Softint clear, WO */
2137 case 0x16: /* Softint write */
2138 #endif
2139 default:
2140 goto illegal_insn;
2142 #if !defined(CONFIG_USER_ONLY)
2143 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2144 #ifndef TARGET_SPARC64
2145 if (!supervisor(dc))
2146 goto priv_insn;
2147 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2148 #else
2149 CHECK_IU_FEATURE(dc, HYPV);
2150 if (!hypervisor(dc))
2151 goto priv_insn;
2152 rs1 = GET_FIELD(insn, 13, 17);
2153 switch (rs1) {
2154 case 0: // hpstate
2155 // gen_op_rdhpstate();
2156 break;
2157 case 1: // htstate
2158 // gen_op_rdhtstate();
2159 break;
2160 case 3: // hintp
2161 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2162 break;
2163 case 5: // htba
2164 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2165 break;
2166 case 6: // hver
2167 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2168 break;
2169 case 31: // hstick_cmpr
2170 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2171 break;
2172 default:
2173 goto illegal_insn;
2175 #endif
2176 gen_movl_TN_reg(rd, cpu_dst);
2177 break;
2178 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2179 if (!supervisor(dc))
2180 goto priv_insn;
2181 #ifdef TARGET_SPARC64
2182 rs1 = GET_FIELD(insn, 13, 17);
2183 switch (rs1) {
2184 case 0: // tpc
2186 TCGv r_tsptr;
2188 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2189 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2190 offsetof(CPUState, tsptr));
2191 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2192 offsetof(trap_state, tpc));
2193 tcg_temp_free(r_tsptr);
2195 break;
2196 case 1: // tnpc
2198 TCGv r_tsptr;
2200 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2201 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2202 offsetof(CPUState, tsptr));
2203 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2204 offsetof(trap_state, tnpc));
2205 tcg_temp_free(r_tsptr);
2207 break;
2208 case 2: // tstate
2210 TCGv r_tsptr;
2212 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2213 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2214 offsetof(CPUState, tsptr));
2215 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2216 offsetof(trap_state, tstate));
2217 tcg_temp_free(r_tsptr);
2219 break;
2220 case 3: // tt
2222 TCGv r_tsptr;
2224 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2225 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2226 offsetof(CPUState, tsptr));
2227 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2228 offsetof(trap_state, tt));
2229 tcg_temp_free(r_tsptr);
2231 break;
2232 case 4: // tick
2234 TCGv r_tickptr;
2236 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2237 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2238 offsetof(CPUState, tick));
2239 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2240 r_tickptr);
2241 gen_movl_TN_reg(rd, cpu_tmp0);
2242 tcg_temp_free(r_tickptr);
2244 break;
2245 case 5: // tba
2246 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2247 break;
2248 case 6: // pstate
2249 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2250 offsetof(CPUSPARCState, pstate));
2251 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2252 break;
2253 case 7: // tl
2254 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2255 offsetof(CPUSPARCState, tl));
2256 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2257 break;
2258 case 8: // pil
2259 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2260 offsetof(CPUSPARCState, psrpil));
2261 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2262 break;
2263 case 9: // cwp
2264 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2265 break;
2266 case 10: // cansave
2267 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2268 offsetof(CPUSPARCState, cansave));
2269 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2270 break;
2271 case 11: // canrestore
2272 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2273 offsetof(CPUSPARCState, canrestore));
2274 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2275 break;
2276 case 12: // cleanwin
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, cleanwin));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2280 break;
2281 case 13: // otherwin
2282 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2283 offsetof(CPUSPARCState, otherwin));
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2285 break;
2286 case 14: // wstate
2287 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2288 offsetof(CPUSPARCState, wstate));
2289 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2290 break;
2291 case 16: // UA2005 gl
2292 CHECK_IU_FEATURE(dc, GL);
2293 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2294 offsetof(CPUSPARCState, gl));
2295 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2296 break;
2297 case 26: // UA2005 strand status
2298 CHECK_IU_FEATURE(dc, HYPV);
2299 if (!hypervisor(dc))
2300 goto priv_insn;
2301 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2302 break;
2303 case 31: // ver
2304 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2305 break;
2306 case 15: // fq
2307 default:
2308 goto illegal_insn;
2310 #else
2311 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2312 #endif
2313 gen_movl_TN_reg(rd, cpu_tmp0);
2314 break;
2315 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2316 #ifdef TARGET_SPARC64
2317 save_state(dc, cpu_cond);
2318 tcg_gen_helper_0_0(helper_flushw);
2319 #else
2320 if (!supervisor(dc))
2321 goto priv_insn;
2322 gen_movl_TN_reg(rd, cpu_tbr);
2323 #endif
2324 break;
2325 #endif
2326 } else if (xop == 0x34) { /* FPU Operations */
2327 if (gen_trap_ifnofpu(dc, cpu_cond))
2328 goto jmp_insn;
2329 gen_op_clear_ieee_excp_and_FTT();
2330 rs1 = GET_FIELD(insn, 13, 17);
2331 rs2 = GET_FIELD(insn, 27, 31);
2332 xop = GET_FIELD(insn, 18, 26);
2333 switch (xop) {
2334 case 0x1: /* fmovs */
2335 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2336 break;
2337 case 0x5: /* fnegs */
2338 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2339 cpu_fpr[rs2]);
2340 break;
2341 case 0x9: /* fabss */
2342 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2343 cpu_fpr[rs2]);
2344 break;
2345 case 0x29: /* fsqrts */
2346 CHECK_FPU_FEATURE(dc, FSQRT);
2347 gen_clear_float_exceptions();
2348 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2349 cpu_fpr[rs2]);
2350 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2351 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2352 break;
2353 case 0x2a: /* fsqrtd */
2354 CHECK_FPU_FEATURE(dc, FSQRT);
2355 gen_op_load_fpr_DT1(DFPREG(rs2));
2356 gen_clear_float_exceptions();
2357 tcg_gen_helper_0_0(helper_fsqrtd);
2358 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2359 gen_op_store_DT0_fpr(DFPREG(rd));
2360 break;
2361 case 0x2b: /* fsqrtq */
2362 CHECK_FPU_FEATURE(dc, FLOAT128);
2363 gen_op_load_fpr_QT1(QFPREG(rs2));
2364 gen_clear_float_exceptions();
2365 tcg_gen_helper_0_0(helper_fsqrtq);
2366 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2367 gen_op_store_QT0_fpr(QFPREG(rd));
2368 break;
2369 case 0x41: /* fadds */
2370 gen_clear_float_exceptions();
2371 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2372 cpu_fpr[rs1], cpu_fpr[rs2]);
2373 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2374 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2375 break;
2376 case 0x42:
2377 gen_op_load_fpr_DT0(DFPREG(rs1));
2378 gen_op_load_fpr_DT1(DFPREG(rs2));
2379 gen_clear_float_exceptions();
2380 tcg_gen_helper_0_0(helper_faddd);
2381 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2382 gen_op_store_DT0_fpr(DFPREG(rd));
2383 break;
2384 case 0x43: /* faddq */
2385 CHECK_FPU_FEATURE(dc, FLOAT128);
2386 gen_op_load_fpr_QT0(QFPREG(rs1));
2387 gen_op_load_fpr_QT1(QFPREG(rs2));
2388 gen_clear_float_exceptions();
2389 tcg_gen_helper_0_0(helper_faddq);
2390 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2391 gen_op_store_QT0_fpr(QFPREG(rd));
2392 break;
2393 case 0x45: /* fsubs */
2394 gen_clear_float_exceptions();
2395 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2396 cpu_fpr[rs1], cpu_fpr[rs2]);
2397 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2398 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2399 break;
2400 case 0x46:
2401 gen_op_load_fpr_DT0(DFPREG(rs1));
2402 gen_op_load_fpr_DT1(DFPREG(rs2));
2403 gen_clear_float_exceptions();
2404 tcg_gen_helper_0_0(helper_fsubd);
2405 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2406 gen_op_store_DT0_fpr(DFPREG(rd));
2407 break;
2408 case 0x47: /* fsubq */
2409 CHECK_FPU_FEATURE(dc, FLOAT128);
2410 gen_op_load_fpr_QT0(QFPREG(rs1));
2411 gen_op_load_fpr_QT1(QFPREG(rs2));
2412 gen_clear_float_exceptions();
2413 tcg_gen_helper_0_0(helper_fsubq);
2414 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2415 gen_op_store_QT0_fpr(QFPREG(rd));
2416 break;
2417 case 0x49: /* fmuls */
2418 CHECK_FPU_FEATURE(dc, FMUL);
2419 gen_clear_float_exceptions();
2420 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2421 cpu_fpr[rs1], cpu_fpr[rs2]);
2422 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2423 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2424 break;
2425 case 0x4a: /* fmuld */
2426 CHECK_FPU_FEATURE(dc, FMUL);
2427 gen_op_load_fpr_DT0(DFPREG(rs1));
2428 gen_op_load_fpr_DT1(DFPREG(rs2));
2429 gen_clear_float_exceptions();
2430 tcg_gen_helper_0_0(helper_fmuld);
2431 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2432 gen_op_store_DT0_fpr(DFPREG(rd));
2433 break;
2434 case 0x4b: /* fmulq */
2435 CHECK_FPU_FEATURE(dc, FLOAT128);
2436 CHECK_FPU_FEATURE(dc, FMUL);
2437 gen_op_load_fpr_QT0(QFPREG(rs1));
2438 gen_op_load_fpr_QT1(QFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_fmulq);
2441 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2442 gen_op_store_QT0_fpr(QFPREG(rd));
2443 break;
2444 case 0x4d: /* fdivs */
2445 gen_clear_float_exceptions();
2446 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2447 cpu_fpr[rs1], cpu_fpr[rs2]);
2448 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2449 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2450 break;
2451 case 0x4e:
2452 gen_op_load_fpr_DT0(DFPREG(rs1));
2453 gen_op_load_fpr_DT1(DFPREG(rs2));
2454 gen_clear_float_exceptions();
2455 tcg_gen_helper_0_0(helper_fdivd);
2456 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2457 gen_op_store_DT0_fpr(DFPREG(rd));
2458 break;
2459 case 0x4f: /* fdivq */
2460 CHECK_FPU_FEATURE(dc, FLOAT128);
2461 gen_op_load_fpr_QT0(QFPREG(rs1));
2462 gen_op_load_fpr_QT1(QFPREG(rs2));
2463 gen_clear_float_exceptions();
2464 tcg_gen_helper_0_0(helper_fdivq);
2465 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2466 gen_op_store_QT0_fpr(QFPREG(rd));
2467 break;
2468 case 0x69: /* fsmuld */
2469 CHECK_FPU_FEATURE(dc, FSMULD);
2470 gen_clear_float_exceptions();
2471 tcg_gen_helper_0_2(helper_fsmuld, cpu_fpr[rs1],
2472 cpu_fpr[rs2]);
2473 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2474 gen_op_store_DT0_fpr(DFPREG(rd));
2475 break;
2476 case 0x6e: /* fdmulq */
2477 CHECK_FPU_FEATURE(dc, FLOAT128);
2478 gen_op_load_fpr_DT0(DFPREG(rs1));
2479 gen_op_load_fpr_DT1(DFPREG(rs2));
2480 gen_clear_float_exceptions();
2481 tcg_gen_helper_0_0(helper_fdmulq);
2482 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2483 gen_op_store_QT0_fpr(QFPREG(rd));
2484 break;
2485 case 0xc4: /* fitos */
2486 gen_clear_float_exceptions();
2487 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2488 cpu_fpr[rs2]);
2489 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2490 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2491 break;
2492 case 0xc6: /* fdtos */
2493 gen_op_load_fpr_DT1(DFPREG(rs2));
2494 gen_clear_float_exceptions();
2495 tcg_gen_helper_1_0(helper_fdtos, cpu_tmp32);
2496 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2497 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2498 break;
2499 case 0xc7: /* fqtos */
2500 CHECK_FPU_FEATURE(dc, FLOAT128);
2501 gen_op_load_fpr_QT1(QFPREG(rs2));
2502 gen_clear_float_exceptions();
2503 tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
2504 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2505 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2506 break;
2507 case 0xc8: /* fitod */
2508 tcg_gen_helper_0_1(helper_fitod, cpu_fpr[rs2]);
2509 gen_op_store_DT0_fpr(DFPREG(rd));
2510 break;
2511 case 0xc9: /* fstod */
2512 tcg_gen_helper_0_1(helper_fstod, cpu_fpr[rs2]);
2513 gen_op_store_DT0_fpr(DFPREG(rd));
2514 break;
2515 case 0xcb: /* fqtod */
2516 CHECK_FPU_FEATURE(dc, FLOAT128);
2517 gen_op_load_fpr_QT1(QFPREG(rs2));
2518 gen_clear_float_exceptions();
2519 tcg_gen_helper_0_0(helper_fqtod);
2520 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2522 break;
2523 case 0xcc: /* fitoq */
2524 CHECK_FPU_FEATURE(dc, FLOAT128);
2525 tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
2526 gen_op_store_QT0_fpr(QFPREG(rd));
2527 break;
2528 case 0xcd: /* fstoq */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
2531 gen_op_store_QT0_fpr(QFPREG(rd));
2532 break;
2533 case 0xce: /* fdtoq */
2534 CHECK_FPU_FEATURE(dc, FLOAT128);
2535 gen_op_load_fpr_DT1(DFPREG(rs2));
2536 tcg_gen_helper_0_0(helper_fdtoq);
2537 gen_op_store_QT0_fpr(QFPREG(rd));
2538 break;
2539 case 0xd1: /* fstoi */
2540 gen_clear_float_exceptions();
2541 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2542 cpu_fpr[rs2]);
2543 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2544 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2545 break;
2546 case 0xd2: /* fdtoi */
2547 gen_op_load_fpr_DT1(DFPREG(rs2));
2548 gen_clear_float_exceptions();
2549 tcg_gen_helper_1_0(helper_fdtoi, cpu_tmp32);
2550 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2551 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2552 break;
2553 case 0xd3: /* fqtoi */
2554 CHECK_FPU_FEATURE(dc, FLOAT128);
2555 gen_op_load_fpr_QT1(QFPREG(rs2));
2556 gen_clear_float_exceptions();
2557 tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
2558 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2559 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2560 break;
2561 #ifdef TARGET_SPARC64
2562 case 0x2: /* V9 fmovd */
2563 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2564 cpu_fpr[DFPREG(rs2)]);
2565 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2566 cpu_fpr[DFPREG(rs2) + 1]);
2567 break;
2568 case 0x3: /* V9 fmovq */
2569 CHECK_FPU_FEATURE(dc, FLOAT128);
2570 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2571 cpu_fpr[QFPREG(rs2)]);
2572 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2573 cpu_fpr[QFPREG(rs2) + 1]);
2574 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2575 cpu_fpr[QFPREG(rs2) + 2]);
2576 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2577 cpu_fpr[QFPREG(rs2) + 3]);
2578 break;
2579 case 0x6: /* V9 fnegd */
2580 gen_op_load_fpr_DT1(DFPREG(rs2));
2581 tcg_gen_helper_0_0(helper_fnegd);
2582 gen_op_store_DT0_fpr(DFPREG(rd));
2583 break;
2584 case 0x7: /* V9 fnegq */
2585 CHECK_FPU_FEATURE(dc, FLOAT128);
2586 gen_op_load_fpr_QT1(QFPREG(rs2));
2587 tcg_gen_helper_0_0(helper_fnegq);
2588 gen_op_store_QT0_fpr(QFPREG(rd));
2589 break;
2590 case 0xa: /* V9 fabsd */
2591 gen_op_load_fpr_DT1(DFPREG(rs2));
2592 tcg_gen_helper_0_0(helper_fabsd);
2593 gen_op_store_DT0_fpr(DFPREG(rd));
2594 break;
2595 case 0xb: /* V9 fabsq */
2596 CHECK_FPU_FEATURE(dc, FLOAT128);
2597 gen_op_load_fpr_QT1(QFPREG(rs2));
2598 tcg_gen_helper_0_0(helper_fabsq);
2599 gen_op_store_QT0_fpr(QFPREG(rd));
2600 break;
2601 case 0x81: /* V9 fstox */
2602 gen_clear_float_exceptions();
2603 tcg_gen_helper_0_1(helper_fstox, cpu_fpr[rs2]);
2604 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2605 gen_op_store_DT0_fpr(DFPREG(rd));
2606 break;
2607 case 0x82: /* V9 fdtox */
2608 gen_op_load_fpr_DT1(DFPREG(rs2));
2609 gen_clear_float_exceptions();
2610 tcg_gen_helper_0_0(helper_fdtox);
2611 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2612 gen_op_store_DT0_fpr(DFPREG(rd));
2613 break;
2614 case 0x83: /* V9 fqtox */
2615 CHECK_FPU_FEATURE(dc, FLOAT128);
2616 gen_op_load_fpr_QT1(QFPREG(rs2));
2617 gen_clear_float_exceptions();
2618 tcg_gen_helper_0_0(helper_fqtox);
2619 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2620 gen_op_store_DT0_fpr(DFPREG(rd));
2621 break;
2622 case 0x84: /* V9 fxtos */
2623 gen_op_load_fpr_DT1(DFPREG(rs2));
2624 gen_clear_float_exceptions();
2625 tcg_gen_helper_1_0(helper_fxtos, cpu_tmp32);
2626 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2627 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2628 break;
2629 case 0x88: /* V9 fxtod */
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fxtod);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2634 gen_op_store_DT0_fpr(DFPREG(rd));
2635 break;
2636 case 0x8c: /* V9 fxtoq */
2637 CHECK_FPU_FEATURE(dc, FLOAT128);
2638 gen_op_load_fpr_DT1(DFPREG(rs2));
2639 gen_clear_float_exceptions();
2640 tcg_gen_helper_0_0(helper_fxtoq);
2641 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2642 gen_op_store_QT0_fpr(QFPREG(rd));
2643 break;
2644 #endif
2645 default:
2646 goto illegal_insn;
2648 } else if (xop == 0x35) { /* FPU Operations */
2649 #ifdef TARGET_SPARC64
2650 int cond;
2651 #endif
2652 if (gen_trap_ifnofpu(dc, cpu_cond))
2653 goto jmp_insn;
2654 gen_op_clear_ieee_excp_and_FTT();
2655 rs1 = GET_FIELD(insn, 13, 17);
2656 rs2 = GET_FIELD(insn, 27, 31);
2657 xop = GET_FIELD(insn, 18, 26);
2658 #ifdef TARGET_SPARC64
2659 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2660 int l1;
2662 l1 = gen_new_label();
2663 cond = GET_FIELD_SP(insn, 14, 17);
2664 cpu_src1 = get_src1(insn, cpu_src1);
2665 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2666 0, l1);
2667 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2668 gen_set_label(l1);
2669 break;
2670 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2671 int l1;
2673 l1 = gen_new_label();
2674 cond = GET_FIELD_SP(insn, 14, 17);
2675 cpu_src1 = get_src1(insn, cpu_src1);
2676 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2677 0, l1);
2678 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2679 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2680 gen_set_label(l1);
2681 break;
2682 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2683 int l1;
2685 CHECK_FPU_FEATURE(dc, FLOAT128);
2686 l1 = gen_new_label();
2687 cond = GET_FIELD_SP(insn, 14, 17);
2688 cpu_src1 = get_src1(insn, cpu_src1);
2689 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2690 0, l1);
2691 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2692 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2693 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2694 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2695 gen_set_label(l1);
2696 break;
2698 #endif
2699 switch (xop) {
2700 #ifdef TARGET_SPARC64
2701 #define FMOVSCC(fcc) \
2703 TCGv r_cond; \
2704 int l1; \
2706 l1 = gen_new_label(); \
2707 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2708 cond = GET_FIELD_SP(insn, 14, 17); \
2709 gen_fcond(r_cond, fcc, cond); \
2710 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2711 0, l1); \
2712 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2713 gen_set_label(l1); \
2714 tcg_temp_free(r_cond); \
2716 #define FMOVDCC(fcc) \
2718 TCGv r_cond; \
2719 int l1; \
2721 l1 = gen_new_label(); \
2722 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2723 cond = GET_FIELD_SP(insn, 14, 17); \
2724 gen_fcond(r_cond, fcc, cond); \
2725 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2726 0, l1); \
2727 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2728 cpu_fpr[DFPREG(rs2)]); \
2729 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2730 cpu_fpr[DFPREG(rs2) + 1]); \
2731 gen_set_label(l1); \
2732 tcg_temp_free(r_cond); \
2734 #define FMOVQCC(fcc) \
2736 TCGv r_cond; \
2737 int l1; \
2739 l1 = gen_new_label(); \
2740 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2741 cond = GET_FIELD_SP(insn, 14, 17); \
2742 gen_fcond(r_cond, fcc, cond); \
2743 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2744 0, l1); \
2745 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2746 cpu_fpr[QFPREG(rs2)]); \
2747 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2748 cpu_fpr[QFPREG(rs2) + 1]); \
2749 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2750 cpu_fpr[QFPREG(rs2) + 2]); \
2751 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2752 cpu_fpr[QFPREG(rs2) + 3]); \
2753 gen_set_label(l1); \
2754 tcg_temp_free(r_cond); \
2756 case 0x001: /* V9 fmovscc %fcc0 */
2757 FMOVSCC(0);
2758 break;
2759 case 0x002: /* V9 fmovdcc %fcc0 */
2760 FMOVDCC(0);
2761 break;
2762 case 0x003: /* V9 fmovqcc %fcc0 */
2763 CHECK_FPU_FEATURE(dc, FLOAT128);
2764 FMOVQCC(0);
2765 break;
2766 case 0x041: /* V9 fmovscc %fcc1 */
2767 FMOVSCC(1);
2768 break;
2769 case 0x042: /* V9 fmovdcc %fcc1 */
2770 FMOVDCC(1);
2771 break;
2772 case 0x043: /* V9 fmovqcc %fcc1 */
2773 CHECK_FPU_FEATURE(dc, FLOAT128);
2774 FMOVQCC(1);
2775 break;
2776 case 0x081: /* V9 fmovscc %fcc2 */
2777 FMOVSCC(2);
2778 break;
2779 case 0x082: /* V9 fmovdcc %fcc2 */
2780 FMOVDCC(2);
2781 break;
2782 case 0x083: /* V9 fmovqcc %fcc2 */
2783 CHECK_FPU_FEATURE(dc, FLOAT128);
2784 FMOVQCC(2);
2785 break;
2786 case 0x0c1: /* V9 fmovscc %fcc3 */
2787 FMOVSCC(3);
2788 break;
2789 case 0x0c2: /* V9 fmovdcc %fcc3 */
2790 FMOVDCC(3);
2791 break;
2792 case 0x0c3: /* V9 fmovqcc %fcc3 */
2793 CHECK_FPU_FEATURE(dc, FLOAT128);
2794 FMOVQCC(3);
2795 break;
2796 #undef FMOVSCC
2797 #undef FMOVDCC
2798 #undef FMOVQCC
2799 #define FMOVCC(size_FDQ, icc) \
2801 TCGv r_cond; \
2802 int l1; \
2804 l1 = gen_new_label(); \
2805 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2806 cond = GET_FIELD_SP(insn, 14, 17); \
2807 gen_cond(r_cond, icc, cond); \
2808 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2809 0, l1); \
2810 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2811 (glue(size_FDQ, FPREG(rs2))); \
2812 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2813 (glue(size_FDQ, FPREG(rd))); \
2814 gen_set_label(l1); \
2815 tcg_temp_free(r_cond); \
2817 #define FMOVSCC(icc) \
2819 TCGv r_cond; \
2820 int l1; \
2822 l1 = gen_new_label(); \
2823 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2824 cond = GET_FIELD_SP(insn, 14, 17); \
2825 gen_cond(r_cond, icc, cond); \
2826 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2827 0, l1); \
2828 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2829 gen_set_label(l1); \
2830 tcg_temp_free(r_cond); \
2832 #define FMOVDCC(icc) \
2834 TCGv r_cond; \
2835 int l1; \
2837 l1 = gen_new_label(); \
2838 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2839 cond = GET_FIELD_SP(insn, 14, 17); \
2840 gen_cond(r_cond, icc, cond); \
2841 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2842 0, l1); \
2843 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2844 cpu_fpr[DFPREG(rs2)]); \
2845 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2846 cpu_fpr[DFPREG(rs2) + 1]); \
2847 gen_set_label(l1); \
2848 tcg_temp_free(r_cond); \
2850 #define FMOVQCC(icc) \
2852 TCGv r_cond; \
2853 int l1; \
2855 l1 = gen_new_label(); \
2856 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2857 cond = GET_FIELD_SP(insn, 14, 17); \
2858 gen_cond(r_cond, icc, cond); \
2859 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2860 0, l1); \
2861 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2862 cpu_fpr[QFPREG(rs2)]); \
2863 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2864 cpu_fpr[QFPREG(rs2) + 1]); \
2865 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2866 cpu_fpr[QFPREG(rs2) + 2]); \
2867 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2868 cpu_fpr[QFPREG(rs2) + 3]); \
2869 gen_set_label(l1); \
2870 tcg_temp_free(r_cond); \
2873 case 0x101: /* V9 fmovscc %icc */
2874 FMOVSCC(0);
2875 break;
2876 case 0x102: /* V9 fmovdcc %icc */
2877 FMOVDCC(0);
2878 case 0x103: /* V9 fmovqcc %icc */
2879 CHECK_FPU_FEATURE(dc, FLOAT128);
2880 FMOVQCC(0);
2881 break;
2882 case 0x181: /* V9 fmovscc %xcc */
2883 FMOVSCC(1);
2884 break;
2885 case 0x182: /* V9 fmovdcc %xcc */
2886 FMOVDCC(1);
2887 break;
2888 case 0x183: /* V9 fmovqcc %xcc */
2889 CHECK_FPU_FEATURE(dc, FLOAT128);
2890 FMOVQCC(1);
2891 break;
2892 #undef FMOVSCC
2893 #undef FMOVDCC
2894 #undef FMOVQCC
2895 #endif
2896 case 0x51: /* fcmps, V9 %fcc */
2897 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2898 break;
2899 case 0x52: /* fcmpd, V9 %fcc */
2900 gen_op_load_fpr_DT0(DFPREG(rs1));
2901 gen_op_load_fpr_DT1(DFPREG(rs2));
2902 gen_op_fcmpd(rd & 3);
2903 break;
2904 case 0x53: /* fcmpq, V9 %fcc */
2905 CHECK_FPU_FEATURE(dc, FLOAT128);
2906 gen_op_load_fpr_QT0(QFPREG(rs1));
2907 gen_op_load_fpr_QT1(QFPREG(rs2));
2908 gen_op_fcmpq(rd & 3);
2909 break;
2910 case 0x55: /* fcmpes, V9 %fcc */
2911 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2912 break;
2913 case 0x56: /* fcmped, V9 %fcc */
2914 gen_op_load_fpr_DT0(DFPREG(rs1));
2915 gen_op_load_fpr_DT1(DFPREG(rs2));
2916 gen_op_fcmped(rd & 3);
2917 break;
2918 case 0x57: /* fcmpeq, V9 %fcc */
2919 CHECK_FPU_FEATURE(dc, FLOAT128);
2920 gen_op_load_fpr_QT0(QFPREG(rs1));
2921 gen_op_load_fpr_QT1(QFPREG(rs2));
2922 gen_op_fcmpeq(rd & 3);
2923 break;
2924 default:
2925 goto illegal_insn;
2927 } else if (xop == 0x2) {
2928 // clr/mov shortcut
2930 rs1 = GET_FIELD(insn, 13, 17);
2931 if (rs1 == 0) {
2932 // or %g0, x, y -> mov T0, x; mov y, T0
2933 if (IS_IMM) { /* immediate */
2934 TCGv r_const;
2936 rs2 = GET_FIELDs(insn, 19, 31);
2937 r_const = tcg_const_tl((int)rs2);
2938 gen_movl_TN_reg(rd, r_const);
2939 tcg_temp_free(r_const);
2940 } else { /* register */
2941 rs2 = GET_FIELD(insn, 27, 31);
2942 gen_movl_reg_TN(rs2, cpu_dst);
2943 gen_movl_TN_reg(rd, cpu_dst);
2945 } else {
2946 cpu_src1 = get_src1(insn, cpu_src1);
2947 if (IS_IMM) { /* immediate */
2948 rs2 = GET_FIELDs(insn, 19, 31);
2949 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2950 gen_movl_TN_reg(rd, cpu_dst);
2951 } else { /* register */
2952 // or x, %g0, y -> mov T1, x; mov y, T1
2953 rs2 = GET_FIELD(insn, 27, 31);
2954 if (rs2 != 0) {
2955 gen_movl_reg_TN(rs2, cpu_src2);
2956 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2957 gen_movl_TN_reg(rd, cpu_dst);
2958 } else
2959 gen_movl_TN_reg(rd, cpu_src1);
2962 #ifdef TARGET_SPARC64
2963 } else if (xop == 0x25) { /* sll, V9 sllx */
2964 cpu_src1 = get_src1(insn, cpu_src1);
2965 if (IS_IMM) { /* immediate */
2966 rs2 = GET_FIELDs(insn, 20, 31);
2967 if (insn & (1 << 12)) {
2968 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2969 } else {
2970 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2972 } else { /* register */
2973 rs2 = GET_FIELD(insn, 27, 31);
2974 gen_movl_reg_TN(rs2, cpu_src2);
2975 if (insn & (1 << 12)) {
2976 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2977 } else {
2978 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2980 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2982 gen_movl_TN_reg(rd, cpu_dst);
2983 } else if (xop == 0x26) { /* srl, V9 srlx */
2984 cpu_src1 = get_src1(insn, cpu_src1);
2985 if (IS_IMM) { /* immediate */
2986 rs2 = GET_FIELDs(insn, 20, 31);
2987 if (insn & (1 << 12)) {
2988 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2989 } else {
2990 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2991 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2993 } else { /* register */
2994 rs2 = GET_FIELD(insn, 27, 31);
2995 gen_movl_reg_TN(rs2, cpu_src2);
2996 if (insn & (1 << 12)) {
2997 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2998 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2999 } else {
3000 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3001 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3002 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3005 gen_movl_TN_reg(rd, cpu_dst);
3006 } else if (xop == 0x27) { /* sra, V9 srax */
3007 cpu_src1 = get_src1(insn, cpu_src1);
3008 if (IS_IMM) { /* immediate */
3009 rs2 = GET_FIELDs(insn, 20, 31);
3010 if (insn & (1 << 12)) {
3011 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3012 } else {
3013 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3014 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3015 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3017 } else { /* register */
3018 rs2 = GET_FIELD(insn, 27, 31);
3019 gen_movl_reg_TN(rs2, cpu_src2);
3020 if (insn & (1 << 12)) {
3021 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3022 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3023 } else {
3024 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3025 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3026 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3027 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3030 gen_movl_TN_reg(rd, cpu_dst);
3031 #endif
3032 } else if (xop < 0x36) {
3033 cpu_src1 = get_src1(insn, cpu_src1);
3034 cpu_src2 = get_src2(insn, cpu_src2);
3035 if (xop < 0x20) {
3036 switch (xop & ~0x10) {
3037 case 0x0:
3038 if (xop & 0x10)
3039 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3040 else
3041 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3042 break;
3043 case 0x1:
3044 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3045 if (xop & 0x10)
3046 gen_op_logic_cc(cpu_dst);
3047 break;
3048 case 0x2:
3049 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3050 if (xop & 0x10)
3051 gen_op_logic_cc(cpu_dst);
3052 break;
3053 case 0x3:
3054 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3055 if (xop & 0x10)
3056 gen_op_logic_cc(cpu_dst);
3057 break;
3058 case 0x4:
3059 if (xop & 0x10)
3060 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3061 else
3062 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3063 break;
3064 case 0x5:
3065 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3066 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3067 if (xop & 0x10)
3068 gen_op_logic_cc(cpu_dst);
3069 break;
3070 case 0x6:
3071 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3072 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3073 if (xop & 0x10)
3074 gen_op_logic_cc(cpu_dst);
3075 break;
3076 case 0x7:
3077 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3078 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3079 if (xop & 0x10)
3080 gen_op_logic_cc(cpu_dst);
3081 break;
3082 case 0x8:
3083 if (xop & 0x10)
3084 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3085 else {
3086 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3087 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3088 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3090 break;
3091 #ifdef TARGET_SPARC64
3092 case 0x9: /* V9 mulx */
3093 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3094 break;
3095 #endif
3096 case 0xa:
3097 CHECK_IU_FEATURE(dc, MUL);
3098 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3099 if (xop & 0x10)
3100 gen_op_logic_cc(cpu_dst);
3101 break;
3102 case 0xb:
3103 CHECK_IU_FEATURE(dc, MUL);
3104 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3105 if (xop & 0x10)
3106 gen_op_logic_cc(cpu_dst);
3107 break;
3108 case 0xc:
3109 if (xop & 0x10)
3110 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3111 else {
3112 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3113 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3114 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3116 break;
3117 #ifdef TARGET_SPARC64
3118 case 0xd: /* V9 udivx */
3119 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3120 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3121 gen_trap_ifdivzero_tl(cpu_cc_src2);
3122 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3123 break;
3124 #endif
3125 case 0xe:
3126 CHECK_IU_FEATURE(dc, DIV);
3127 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3128 cpu_src2);
3129 if (xop & 0x10)
3130 gen_op_div_cc(cpu_dst);
3131 break;
3132 case 0xf:
3133 CHECK_IU_FEATURE(dc, DIV);
3134 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3135 cpu_src2);
3136 if (xop & 0x10)
3137 gen_op_div_cc(cpu_dst);
3138 break;
3139 default:
3140 goto illegal_insn;
3142 gen_movl_TN_reg(rd, cpu_dst);
3143 } else {
3144 switch (xop) {
3145 case 0x20: /* taddcc */
3146 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3147 gen_movl_TN_reg(rd, cpu_dst);
3148 break;
3149 case 0x21: /* tsubcc */
3150 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3151 gen_movl_TN_reg(rd, cpu_dst);
3152 break;
3153 case 0x22: /* taddcctv */
3154 save_state(dc, cpu_cond);
3155 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3156 gen_movl_TN_reg(rd, cpu_dst);
3157 break;
3158 case 0x23: /* tsubcctv */
3159 save_state(dc, cpu_cond);
3160 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3161 gen_movl_TN_reg(rd, cpu_dst);
3162 break;
3163 case 0x24: /* mulscc */
3164 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3165 gen_movl_TN_reg(rd, cpu_dst);
3166 break;
3167 #ifndef TARGET_SPARC64
3168 case 0x25: /* sll */
3169 if (IS_IMM) { /* immediate */
3170 rs2 = GET_FIELDs(insn, 20, 31);
3171 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3172 } else { /* register */
3173 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3174 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3176 gen_movl_TN_reg(rd, cpu_dst);
3177 break;
3178 case 0x26: /* srl */
3179 if (IS_IMM) { /* immediate */
3180 rs2 = GET_FIELDs(insn, 20, 31);
3181 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3182 } else { /* register */
3183 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3184 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3186 gen_movl_TN_reg(rd, cpu_dst);
3187 break;
3188 case 0x27: /* sra */
3189 if (IS_IMM) { /* immediate */
3190 rs2 = GET_FIELDs(insn, 20, 31);
3191 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3192 } else { /* register */
3193 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3194 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3196 gen_movl_TN_reg(rd, cpu_dst);
3197 break;
3198 #endif
3199 case 0x30:
3201 switch(rd) {
3202 case 0: /* wry */
3203 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3204 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3205 break;
3206 #ifndef TARGET_SPARC64
3207 case 0x01 ... 0x0f: /* undefined in the
3208 SPARCv8 manual, nop
3209 on the microSPARC
3210 II */
3211 case 0x10 ... 0x1f: /* implementation-dependent
3212 in the SPARCv8
3213 manual, nop on the
3214 microSPARC II */
3215 break;
3216 #else
3217 case 0x2: /* V9 wrccr */
3218 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3219 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3220 break;
3221 case 0x3: /* V9 wrasi */
3222 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3223 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3224 break;
3225 case 0x6: /* V9 wrfprs */
3226 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3227 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3228 save_state(dc, cpu_cond);
3229 gen_op_next_insn();
3230 tcg_gen_exit_tb(0);
3231 dc->is_br = 1;
3232 break;
3233 case 0xf: /* V9 sir, nop if user */
3234 #if !defined(CONFIG_USER_ONLY)
3235 if (supervisor(dc))
3236 ; // XXX
3237 #endif
3238 break;
3239 case 0x13: /* Graphics Status */
3240 if (gen_trap_ifnofpu(dc, cpu_cond))
3241 goto jmp_insn;
3242 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3243 break;
3244 case 0x17: /* Tick compare */
3245 #if !defined(CONFIG_USER_ONLY)
3246 if (!supervisor(dc))
3247 goto illegal_insn;
3248 #endif
3250 TCGv r_tickptr;
3252 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3253 cpu_src2);
3254 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3255 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3256 offsetof(CPUState, tick));
3257 tcg_gen_helper_0_2(helper_tick_set_limit,
3258 r_tickptr, cpu_tick_cmpr);
3259 tcg_temp_free(r_tickptr);
3261 break;
3262 case 0x18: /* System tick */
3263 #if !defined(CONFIG_USER_ONLY)
3264 if (!supervisor(dc))
3265 goto illegal_insn;
3266 #endif
3268 TCGv r_tickptr;
3270 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3271 cpu_src2);
3272 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3273 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3274 offsetof(CPUState, stick));
3275 tcg_gen_helper_0_2(helper_tick_set_count,
3276 r_tickptr, cpu_dst);
3277 tcg_temp_free(r_tickptr);
3279 break;
3280 case 0x19: /* System tick compare */
3281 #if !defined(CONFIG_USER_ONLY)
3282 if (!supervisor(dc))
3283 goto illegal_insn;
3284 #endif
3286 TCGv r_tickptr;
3288 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3289 cpu_src2);
3290 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3291 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3292 offsetof(CPUState, stick));
3293 tcg_gen_helper_0_2(helper_tick_set_limit,
3294 r_tickptr, cpu_stick_cmpr);
3295 tcg_temp_free(r_tickptr);
3297 break;
3299 case 0x10: /* Performance Control */
3300 case 0x11: /* Performance Instrumentation
3301 Counter */
3302 case 0x12: /* Dispatch Control */
3303 case 0x14: /* Softint set */
3304 case 0x15: /* Softint clear */
3305 case 0x16: /* Softint write */
3306 #endif
3307 default:
3308 goto illegal_insn;
3311 break;
3312 #if !defined(CONFIG_USER_ONLY)
3313 case 0x31: /* wrpsr, V9 saved, restored */
3315 if (!supervisor(dc))
3316 goto priv_insn;
3317 #ifdef TARGET_SPARC64
3318 switch (rd) {
3319 case 0:
3320 tcg_gen_helper_0_0(helper_saved);
3321 break;
3322 case 1:
3323 tcg_gen_helper_0_0(helper_restored);
3324 break;
3325 case 2: /* UA2005 allclean */
3326 case 3: /* UA2005 otherw */
3327 case 4: /* UA2005 normalw */
3328 case 5: /* UA2005 invalw */
3329 // XXX
3330 default:
3331 goto illegal_insn;
3333 #else
3334 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3335 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3336 save_state(dc, cpu_cond);
3337 gen_op_next_insn();
3338 tcg_gen_exit_tb(0);
3339 dc->is_br = 1;
3340 #endif
3342 break;
3343 case 0x32: /* wrwim, V9 wrpr */
3345 if (!supervisor(dc))
3346 goto priv_insn;
3347 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3348 #ifdef TARGET_SPARC64
3349 switch (rd) {
3350 case 0: // tpc
3352 TCGv r_tsptr;
3354 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3355 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3356 offsetof(CPUState, tsptr));
3357 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3358 offsetof(trap_state, tpc));
3359 tcg_temp_free(r_tsptr);
3361 break;
3362 case 1: // tnpc
3364 TCGv r_tsptr;
3366 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3367 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3368 offsetof(CPUState, tsptr));
3369 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3370 offsetof(trap_state, tnpc));
3371 tcg_temp_free(r_tsptr);
3373 break;
3374 case 2: // tstate
3376 TCGv r_tsptr;
3378 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3379 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3380 offsetof(CPUState, tsptr));
3381 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3382 offsetof(trap_state,
3383 tstate));
3384 tcg_temp_free(r_tsptr);
3386 break;
3387 case 3: // tt
3389 TCGv r_tsptr;
3391 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3392 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3393 offsetof(CPUState, tsptr));
3394 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3395 offsetof(trap_state, tt));
3396 tcg_temp_free(r_tsptr);
3398 break;
3399 case 4: // tick
3401 TCGv r_tickptr;
3403 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3404 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3405 offsetof(CPUState, tick));
3406 tcg_gen_helper_0_2(helper_tick_set_count,
3407 r_tickptr, cpu_tmp0);
3408 tcg_temp_free(r_tickptr);
3410 break;
3411 case 5: // tba
3412 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3413 break;
3414 case 6: // pstate
3415 save_state(dc, cpu_cond);
3416 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3417 gen_op_next_insn();
3418 tcg_gen_exit_tb(0);
3419 dc->is_br = 1;
3420 break;
3421 case 7: // tl
3422 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3423 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3424 offsetof(CPUSPARCState, tl));
3425 break;
3426 case 8: // pil
3427 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3428 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3429 offsetof(CPUSPARCState,
3430 psrpil));
3431 break;
3432 case 9: // cwp
3433 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3434 break;
3435 case 10: // cansave
3436 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3437 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3438 offsetof(CPUSPARCState,
3439 cansave));
3440 break;
3441 case 11: // canrestore
3442 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3443 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3444 offsetof(CPUSPARCState,
3445 canrestore));
3446 break;
3447 case 12: // cleanwin
3448 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3449 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3450 offsetof(CPUSPARCState,
3451 cleanwin));
3452 break;
3453 case 13: // otherwin
3454 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3455 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3456 offsetof(CPUSPARCState,
3457 otherwin));
3458 break;
3459 case 14: // wstate
3460 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3461 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3462 offsetof(CPUSPARCState,
3463 wstate));
3464 break;
3465 case 16: // UA2005 gl
3466 CHECK_IU_FEATURE(dc, GL);
3467 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3468 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3469 offsetof(CPUSPARCState, gl));
3470 break;
3471 case 26: // UA2005 strand status
3472 CHECK_IU_FEATURE(dc, HYPV);
3473 if (!hypervisor(dc))
3474 goto priv_insn;
3475 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3476 break;
3477 default:
3478 goto illegal_insn;
3480 #else
3481 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3482 if (dc->def->nwindows != 32)
3483 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3484 (1 << dc->def->nwindows) - 1);
3485 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3486 #endif
3488 break;
3489 case 0x33: /* wrtbr, UA2005 wrhpr */
3491 #ifndef TARGET_SPARC64
3492 if (!supervisor(dc))
3493 goto priv_insn;
3494 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3495 #else
3496 CHECK_IU_FEATURE(dc, HYPV);
3497 if (!hypervisor(dc))
3498 goto priv_insn;
3499 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3500 switch (rd) {
3501 case 0: // hpstate
3502 // XXX gen_op_wrhpstate();
3503 save_state(dc, cpu_cond);
3504 gen_op_next_insn();
3505 tcg_gen_exit_tb(0);
3506 dc->is_br = 1;
3507 break;
3508 case 1: // htstate
3509 // XXX gen_op_wrhtstate();
3510 break;
3511 case 3: // hintp
3512 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3513 break;
3514 case 5: // htba
3515 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3516 break;
3517 case 31: // hstick_cmpr
3519 TCGv r_tickptr;
3521 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3522 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3523 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3524 offsetof(CPUState, hstick));
3525 tcg_gen_helper_0_2(helper_tick_set_limit,
3526 r_tickptr, cpu_hstick_cmpr);
3527 tcg_temp_free(r_tickptr);
3529 break;
3530 case 6: // hver readonly
3531 default:
3532 goto illegal_insn;
3534 #endif
3536 break;
3537 #endif
3538 #ifdef TARGET_SPARC64
3539 case 0x2c: /* V9 movcc */
3541 int cc = GET_FIELD_SP(insn, 11, 12);
3542 int cond = GET_FIELD_SP(insn, 14, 17);
3543 TCGv r_cond;
3544 int l1;
3546 r_cond = tcg_temp_new(TCG_TYPE_TL);
3547 if (insn & (1 << 18)) {
3548 if (cc == 0)
3549 gen_cond(r_cond, 0, cond);
3550 else if (cc == 2)
3551 gen_cond(r_cond, 1, cond);
3552 else
3553 goto illegal_insn;
3554 } else {
3555 gen_fcond(r_cond, cc, cond);
3558 l1 = gen_new_label();
3560 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3561 if (IS_IMM) { /* immediate */
3562 TCGv r_const;
3564 rs2 = GET_FIELD_SPs(insn, 0, 10);
3565 r_const = tcg_const_tl((int)rs2);
3566 gen_movl_TN_reg(rd, r_const);
3567 tcg_temp_free(r_const);
3568 } else {
3569 rs2 = GET_FIELD_SP(insn, 0, 4);
3570 gen_movl_reg_TN(rs2, cpu_tmp0);
3571 gen_movl_TN_reg(rd, cpu_tmp0);
3573 gen_set_label(l1);
3574 tcg_temp_free(r_cond);
3575 break;
3577 case 0x2d: /* V9 sdivx */
3578 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3579 gen_movl_TN_reg(rd, cpu_dst);
3580 break;
3581 case 0x2e: /* V9 popc */
3583 cpu_src2 = get_src2(insn, cpu_src2);
3584 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3585 cpu_src2);
3586 gen_movl_TN_reg(rd, cpu_dst);
3588 case 0x2f: /* V9 movr */
3590 int cond = GET_FIELD_SP(insn, 10, 12);
3591 int l1;
3593 cpu_src1 = get_src1(insn, cpu_src1);
3595 l1 = gen_new_label();
3597 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3598 cpu_src1, 0, l1);
3599 if (IS_IMM) { /* immediate */
3600 TCGv r_const;
3602 rs2 = GET_FIELD_SPs(insn, 0, 9);
3603 r_const = tcg_const_tl((int)rs2);
3604 gen_movl_TN_reg(rd, r_const);
3605 tcg_temp_free(r_const);
3606 } else {
3607 rs2 = GET_FIELD_SP(insn, 0, 4);
3608 gen_movl_reg_TN(rs2, cpu_tmp0);
3609 gen_movl_TN_reg(rd, cpu_tmp0);
3611 gen_set_label(l1);
3612 break;
3614 #endif
3615 default:
3616 goto illegal_insn;
3619 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3620 #ifdef TARGET_SPARC64
3621 int opf = GET_FIELD_SP(insn, 5, 13);
3622 rs1 = GET_FIELD(insn, 13, 17);
3623 rs2 = GET_FIELD(insn, 27, 31);
3624 if (gen_trap_ifnofpu(dc, cpu_cond))
3625 goto jmp_insn;
3627 switch (opf) {
3628 case 0x000: /* VIS I edge8cc */
3629 case 0x001: /* VIS II edge8n */
3630 case 0x002: /* VIS I edge8lcc */
3631 case 0x003: /* VIS II edge8ln */
3632 case 0x004: /* VIS I edge16cc */
3633 case 0x005: /* VIS II edge16n */
3634 case 0x006: /* VIS I edge16lcc */
3635 case 0x007: /* VIS II edge16ln */
3636 case 0x008: /* VIS I edge32cc */
3637 case 0x009: /* VIS II edge32n */
3638 case 0x00a: /* VIS I edge32lcc */
3639 case 0x00b: /* VIS II edge32ln */
3640 // XXX
3641 goto illegal_insn;
3642 case 0x010: /* VIS I array8 */
3643 CHECK_FPU_FEATURE(dc, VIS1);
3644 cpu_src1 = get_src1(insn, cpu_src1);
3645 gen_movl_reg_TN(rs2, cpu_src2);
3646 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3647 cpu_src2);
3648 gen_movl_TN_reg(rd, cpu_dst);
3649 break;
3650 case 0x012: /* VIS I array16 */
3651 CHECK_FPU_FEATURE(dc, VIS1);
3652 cpu_src1 = get_src1(insn, cpu_src1);
3653 gen_movl_reg_TN(rs2, cpu_src2);
3654 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3655 cpu_src2);
3656 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3657 gen_movl_TN_reg(rd, cpu_dst);
3658 break;
3659 case 0x014: /* VIS I array32 */
3660 CHECK_FPU_FEATURE(dc, VIS1);
3661 cpu_src1 = get_src1(insn, cpu_src1);
3662 gen_movl_reg_TN(rs2, cpu_src2);
3663 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3664 cpu_src2);
3665 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3666 gen_movl_TN_reg(rd, cpu_dst);
3667 break;
3668 case 0x018: /* VIS I alignaddr */
3669 CHECK_FPU_FEATURE(dc, VIS1);
3670 cpu_src1 = get_src1(insn, cpu_src1);
3671 gen_movl_reg_TN(rs2, cpu_src2);
3672 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3673 cpu_src2);
3674 gen_movl_TN_reg(rd, cpu_dst);
3675 break;
3676 case 0x019: /* VIS II bmask */
3677 case 0x01a: /* VIS I alignaddrl */
3678 // XXX
3679 goto illegal_insn;
3680 case 0x020: /* VIS I fcmple16 */
3681 CHECK_FPU_FEATURE(dc, VIS1);
3682 gen_op_load_fpr_DT0(DFPREG(rs1));
3683 gen_op_load_fpr_DT1(DFPREG(rs2));
3684 tcg_gen_helper_0_0(helper_fcmple16);
3685 gen_op_store_DT0_fpr(DFPREG(rd));
3686 break;
3687 case 0x022: /* VIS I fcmpne16 */
3688 CHECK_FPU_FEATURE(dc, VIS1);
3689 gen_op_load_fpr_DT0(DFPREG(rs1));
3690 gen_op_load_fpr_DT1(DFPREG(rs2));
3691 tcg_gen_helper_0_0(helper_fcmpne16);
3692 gen_op_store_DT0_fpr(DFPREG(rd));
3693 break;
3694 case 0x024: /* VIS I fcmple32 */
3695 CHECK_FPU_FEATURE(dc, VIS1);
3696 gen_op_load_fpr_DT0(DFPREG(rs1));
3697 gen_op_load_fpr_DT1(DFPREG(rs2));
3698 tcg_gen_helper_0_0(helper_fcmple32);
3699 gen_op_store_DT0_fpr(DFPREG(rd));
3700 break;
3701 case 0x026: /* VIS I fcmpne32 */
3702 CHECK_FPU_FEATURE(dc, VIS1);
3703 gen_op_load_fpr_DT0(DFPREG(rs1));
3704 gen_op_load_fpr_DT1(DFPREG(rs2));
3705 tcg_gen_helper_0_0(helper_fcmpne32);
3706 gen_op_store_DT0_fpr(DFPREG(rd));
3707 break;
3708 case 0x028: /* VIS I fcmpgt16 */
3709 CHECK_FPU_FEATURE(dc, VIS1);
3710 gen_op_load_fpr_DT0(DFPREG(rs1));
3711 gen_op_load_fpr_DT1(DFPREG(rs2));
3712 tcg_gen_helper_0_0(helper_fcmpgt16);
3713 gen_op_store_DT0_fpr(DFPREG(rd));
3714 break;
3715 case 0x02a: /* VIS I fcmpeq16 */
3716 CHECK_FPU_FEATURE(dc, VIS1);
3717 gen_op_load_fpr_DT0(DFPREG(rs1));
3718 gen_op_load_fpr_DT1(DFPREG(rs2));
3719 tcg_gen_helper_0_0(helper_fcmpeq16);
3720 gen_op_store_DT0_fpr(DFPREG(rd));
3721 break;
3722 case 0x02c: /* VIS I fcmpgt32 */
3723 CHECK_FPU_FEATURE(dc, VIS1);
3724 gen_op_load_fpr_DT0(DFPREG(rs1));
3725 gen_op_load_fpr_DT1(DFPREG(rs2));
3726 tcg_gen_helper_0_0(helper_fcmpgt32);
3727 gen_op_store_DT0_fpr(DFPREG(rd));
3728 break;
3729 case 0x02e: /* VIS I fcmpeq32 */
3730 CHECK_FPU_FEATURE(dc, VIS1);
3731 gen_op_load_fpr_DT0(DFPREG(rs1));
3732 gen_op_load_fpr_DT1(DFPREG(rs2));
3733 tcg_gen_helper_0_0(helper_fcmpeq32);
3734 gen_op_store_DT0_fpr(DFPREG(rd));
3735 break;
3736 case 0x031: /* VIS I fmul8x16 */
3737 CHECK_FPU_FEATURE(dc, VIS1);
3738 gen_op_load_fpr_DT0(DFPREG(rs1));
3739 gen_op_load_fpr_DT1(DFPREG(rs2));
3740 tcg_gen_helper_0_0(helper_fmul8x16);
3741 gen_op_store_DT0_fpr(DFPREG(rd));
3742 break;
3743 case 0x033: /* VIS I fmul8x16au */
3744 CHECK_FPU_FEATURE(dc, VIS1);
3745 gen_op_load_fpr_DT0(DFPREG(rs1));
3746 gen_op_load_fpr_DT1(DFPREG(rs2));
3747 tcg_gen_helper_0_0(helper_fmul8x16au);
3748 gen_op_store_DT0_fpr(DFPREG(rd));
3749 break;
3750 case 0x035: /* VIS I fmul8x16al */
3751 CHECK_FPU_FEATURE(dc, VIS1);
3752 gen_op_load_fpr_DT0(DFPREG(rs1));
3753 gen_op_load_fpr_DT1(DFPREG(rs2));
3754 tcg_gen_helper_0_0(helper_fmul8x16al);
3755 gen_op_store_DT0_fpr(DFPREG(rd));
3756 break;
3757 case 0x036: /* VIS I fmul8sux16 */
3758 CHECK_FPU_FEATURE(dc, VIS1);
3759 gen_op_load_fpr_DT0(DFPREG(rs1));
3760 gen_op_load_fpr_DT1(DFPREG(rs2));
3761 tcg_gen_helper_0_0(helper_fmul8sux16);
3762 gen_op_store_DT0_fpr(DFPREG(rd));
3763 break;
3764 case 0x037: /* VIS I fmul8ulx16 */
3765 CHECK_FPU_FEATURE(dc, VIS1);
3766 gen_op_load_fpr_DT0(DFPREG(rs1));
3767 gen_op_load_fpr_DT1(DFPREG(rs2));
3768 tcg_gen_helper_0_0(helper_fmul8ulx16);
3769 gen_op_store_DT0_fpr(DFPREG(rd));
3770 break;
3771 case 0x038: /* VIS I fmuld8sux16 */
3772 CHECK_FPU_FEATURE(dc, VIS1);
3773 gen_op_load_fpr_DT0(DFPREG(rs1));
3774 gen_op_load_fpr_DT1(DFPREG(rs2));
3775 tcg_gen_helper_0_0(helper_fmuld8sux16);
3776 gen_op_store_DT0_fpr(DFPREG(rd));
3777 break;
3778 case 0x039: /* VIS I fmuld8ulx16 */
3779 CHECK_FPU_FEATURE(dc, VIS1);
3780 gen_op_load_fpr_DT0(DFPREG(rs1));
3781 gen_op_load_fpr_DT1(DFPREG(rs2));
3782 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3783 gen_op_store_DT0_fpr(DFPREG(rd));
3784 break;
3785 case 0x03a: /* VIS I fpack32 */
3786 case 0x03b: /* VIS I fpack16 */
3787 case 0x03d: /* VIS I fpackfix */
3788 case 0x03e: /* VIS I pdist */
3789 // XXX
3790 goto illegal_insn;
3791 case 0x048: /* VIS I faligndata */
3792 CHECK_FPU_FEATURE(dc, VIS1);
3793 gen_op_load_fpr_DT0(DFPREG(rs1));
3794 gen_op_load_fpr_DT1(DFPREG(rs2));
3795 tcg_gen_helper_0_0(helper_faligndata);
3796 gen_op_store_DT0_fpr(DFPREG(rd));
3797 break;
3798 case 0x04b: /* VIS I fpmerge */
3799 CHECK_FPU_FEATURE(dc, VIS1);
3800 gen_op_load_fpr_DT0(DFPREG(rs1));
3801 gen_op_load_fpr_DT1(DFPREG(rs2));
3802 tcg_gen_helper_0_0(helper_fpmerge);
3803 gen_op_store_DT0_fpr(DFPREG(rd));
3804 break;
3805 case 0x04c: /* VIS II bshuffle */
3806 // XXX
3807 goto illegal_insn;
3808 case 0x04d: /* VIS I fexpand */
3809 CHECK_FPU_FEATURE(dc, VIS1);
3810 gen_op_load_fpr_DT0(DFPREG(rs1));
3811 gen_op_load_fpr_DT1(DFPREG(rs2));
3812 tcg_gen_helper_0_0(helper_fexpand);
3813 gen_op_store_DT0_fpr(DFPREG(rd));
3814 break;
3815 case 0x050: /* VIS I fpadd16 */
3816 CHECK_FPU_FEATURE(dc, VIS1);
3817 gen_op_load_fpr_DT0(DFPREG(rs1));
3818 gen_op_load_fpr_DT1(DFPREG(rs2));
3819 tcg_gen_helper_0_0(helper_fpadd16);
3820 gen_op_store_DT0_fpr(DFPREG(rd));
3821 break;
3822 case 0x051: /* VIS I fpadd16s */
3823 CHECK_FPU_FEATURE(dc, VIS1);
3824 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3825 cpu_fpr[rs1], cpu_fpr[rs2]);
3826 break;
3827 case 0x052: /* VIS I fpadd32 */
3828 CHECK_FPU_FEATURE(dc, VIS1);
3829 gen_op_load_fpr_DT0(DFPREG(rs1));
3830 gen_op_load_fpr_DT1(DFPREG(rs2));
3831 tcg_gen_helper_0_0(helper_fpadd32);
3832 gen_op_store_DT0_fpr(DFPREG(rd));
3833 break;
3834 case 0x053: /* VIS I fpadd32s */
3835 CHECK_FPU_FEATURE(dc, VIS1);
3836 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3837 cpu_fpr[rs1], cpu_fpr[rs2]);
3838 break;
3839 case 0x054: /* VIS I fpsub16 */
3840 CHECK_FPU_FEATURE(dc, VIS1);
3841 gen_op_load_fpr_DT0(DFPREG(rs1));
3842 gen_op_load_fpr_DT1(DFPREG(rs2));
3843 tcg_gen_helper_0_0(helper_fpsub16);
3844 gen_op_store_DT0_fpr(DFPREG(rd));
3845 break;
3846 case 0x055: /* VIS I fpsub16s */
3847 CHECK_FPU_FEATURE(dc, VIS1);
3848 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3849 cpu_fpr[rs1], cpu_fpr[rs2]);
3850 break;
3851 case 0x056: /* VIS I fpsub32 */
3852 CHECK_FPU_FEATURE(dc, VIS1);
3853 gen_op_load_fpr_DT0(DFPREG(rs1));
3854 gen_op_load_fpr_DT1(DFPREG(rs2));
3855 tcg_gen_helper_0_0(helper_fpsub32);
3856 gen_op_store_DT0_fpr(DFPREG(rd));
3857 break;
3858 case 0x057: /* VIS I fpsub32s */
3859 CHECK_FPU_FEATURE(dc, VIS1);
3860 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3861 cpu_fpr[rs1], cpu_fpr[rs2]);
3862 break;
3863 case 0x060: /* VIS I fzero */
3864 CHECK_FPU_FEATURE(dc, VIS1);
3865 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3866 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3867 break;
3868 case 0x061: /* VIS I fzeros */
3869 CHECK_FPU_FEATURE(dc, VIS1);
3870 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3871 break;
3872 case 0x062: /* VIS I fnor */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3875 cpu_fpr[DFPREG(rs2)]);
3876 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3877 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3878 cpu_fpr[DFPREG(rs2) + 1]);
3879 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3880 break;
3881 case 0x063: /* VIS I fnors */
3882 CHECK_FPU_FEATURE(dc, VIS1);
3883 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3884 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3885 break;
3886 case 0x064: /* VIS I fandnot2 */
3887 CHECK_FPU_FEATURE(dc, VIS1);
3888 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3889 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3890 cpu_fpr[DFPREG(rs2)]);
3891 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3892 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3893 cpu_fpr[DFPREG(rs2) + 1]);
3894 break;
3895 case 0x065: /* VIS I fandnot2s */
3896 CHECK_FPU_FEATURE(dc, VIS1);
3897 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3898 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3899 break;
3900 case 0x066: /* VIS I fnot2 */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3903 -1);
3904 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3905 cpu_fpr[DFPREG(rs2) + 1], -1);
3906 break;
3907 case 0x067: /* VIS I fnot2s */
3908 CHECK_FPU_FEATURE(dc, VIS1);
3909 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3910 break;
3911 case 0x068: /* VIS I fandnot1 */
3912 CHECK_FPU_FEATURE(dc, VIS1);
3913 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3914 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3915 cpu_fpr[DFPREG(rs1)]);
3916 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3917 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3918 cpu_fpr[DFPREG(rs1) + 1]);
3919 break;
3920 case 0x069: /* VIS I fandnot1s */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3923 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3924 break;
3925 case 0x06a: /* VIS I fnot1 */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3928 -1);
3929 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3930 cpu_fpr[DFPREG(rs1) + 1], -1);
3931 break;
3932 case 0x06b: /* VIS I fnot1s */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3935 break;
3936 case 0x06c: /* VIS I fxor */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3939 cpu_fpr[DFPREG(rs2)]);
3940 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3941 cpu_fpr[DFPREG(rs1) + 1],
3942 cpu_fpr[DFPREG(rs2) + 1]);
3943 break;
3944 case 0x06d: /* VIS I fxors */
3945 CHECK_FPU_FEATURE(dc, VIS1);
3946 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3947 break;
3948 case 0x06e: /* VIS I fnand */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3951 cpu_fpr[DFPREG(rs2)]);
3952 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3953 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3954 cpu_fpr[DFPREG(rs2) + 1]);
3955 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3956 break;
3957 case 0x06f: /* VIS I fnands */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3960 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3961 break;
3962 case 0x070: /* VIS I fand */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3965 cpu_fpr[DFPREG(rs2)]);
3966 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3967 cpu_fpr[DFPREG(rs1) + 1],
3968 cpu_fpr[DFPREG(rs2) + 1]);
3969 break;
3970 case 0x071: /* VIS I fands */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3973 break;
3974 case 0x072: /* VIS I fxnor */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3977 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3978 cpu_fpr[DFPREG(rs1)]);
3979 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3980 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3981 cpu_fpr[DFPREG(rs1) + 1]);
3982 break;
3983 case 0x073: /* VIS I fxnors */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3986 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3987 break;
3988 case 0x074: /* VIS I fsrc1 */
3989 CHECK_FPU_FEATURE(dc, VIS1);
3990 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3991 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3992 cpu_fpr[DFPREG(rs1) + 1]);
3993 break;
3994 case 0x075: /* VIS I fsrc1s */
3995 CHECK_FPU_FEATURE(dc, VIS1);
3996 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3997 break;
3998 case 0x076: /* VIS I fornot2 */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
4001 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4002 cpu_fpr[DFPREG(rs2)]);
4003 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
4004 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4005 cpu_fpr[DFPREG(rs2) + 1]);
4006 break;
4007 case 0x077: /* VIS I fornot2s */
4008 CHECK_FPU_FEATURE(dc, VIS1);
4009 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4010 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4011 break;
4012 case 0x078: /* VIS I fsrc2 */
4013 CHECK_FPU_FEATURE(dc, VIS1);
4014 gen_op_load_fpr_DT0(DFPREG(rs2));
4015 gen_op_store_DT0_fpr(DFPREG(rd));
4016 break;
4017 case 0x079: /* VIS I fsrc2s */
4018 CHECK_FPU_FEATURE(dc, VIS1);
4019 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4020 break;
4021 case 0x07a: /* VIS I fornot1 */
4022 CHECK_FPU_FEATURE(dc, VIS1);
4023 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4024 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4025 cpu_fpr[DFPREG(rs1)]);
4026 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4027 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4028 cpu_fpr[DFPREG(rs1) + 1]);
4029 break;
4030 case 0x07b: /* VIS I fornot1s */
4031 CHECK_FPU_FEATURE(dc, VIS1);
4032 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4033 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4034 break;
4035 case 0x07c: /* VIS I for */
4036 CHECK_FPU_FEATURE(dc, VIS1);
4037 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4038 cpu_fpr[DFPREG(rs2)]);
4039 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4040 cpu_fpr[DFPREG(rs1) + 1],
4041 cpu_fpr[DFPREG(rs2) + 1]);
4042 break;
4043 case 0x07d: /* VIS I fors */
4044 CHECK_FPU_FEATURE(dc, VIS1);
4045 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4046 break;
4047 case 0x07e: /* VIS I fone */
4048 CHECK_FPU_FEATURE(dc, VIS1);
4049 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4050 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4051 break;
4052 case 0x07f: /* VIS I fones */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4055 break;
4056 case 0x080: /* VIS I shutdown */
4057 case 0x081: /* VIS II siam */
4058 // XXX
4059 goto illegal_insn;
4060 default:
4061 goto illegal_insn;
4063 #else
4064 goto ncp_insn;
4065 #endif
4066 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4067 #ifdef TARGET_SPARC64
4068 goto illegal_insn;
4069 #else
4070 goto ncp_insn;
4071 #endif
4072 #ifdef TARGET_SPARC64
4073 } else if (xop == 0x39) { /* V9 return */
4074 TCGv r_const;
4076 save_state(dc, cpu_cond);
4077 cpu_src1 = get_src1(insn, cpu_src1);
4078 if (IS_IMM) { /* immediate */
4079 rs2 = GET_FIELDs(insn, 19, 31);
4080 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4081 } else { /* register */
4082 rs2 = GET_FIELD(insn, 27, 31);
4083 if (rs2) {
4084 gen_movl_reg_TN(rs2, cpu_src2);
4085 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4086 } else
4087 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4089 tcg_gen_helper_0_0(helper_restore);
4090 gen_mov_pc_npc(dc, cpu_cond);
4091 r_const = tcg_const_i32(3);
4092 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4093 tcg_temp_free(r_const);
4094 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4095 dc->npc = DYNAMIC_PC;
4096 goto jmp_insn;
4097 #endif
4098 } else {
4099 cpu_src1 = get_src1(insn, cpu_src1);
4100 if (IS_IMM) { /* immediate */
4101 rs2 = GET_FIELDs(insn, 19, 31);
4102 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4103 } else { /* register */
4104 rs2 = GET_FIELD(insn, 27, 31);
4105 if (rs2) {
4106 gen_movl_reg_TN(rs2, cpu_src2);
4107 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4108 } else
4109 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4111 switch (xop) {
4112 case 0x38: /* jmpl */
4114 TCGv r_const;
4116 r_const = tcg_const_tl(dc->pc);
4117 gen_movl_TN_reg(rd, r_const);
4118 tcg_temp_free(r_const);
4119 gen_mov_pc_npc(dc, cpu_cond);
4120 r_const = tcg_const_i32(3);
4121 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4122 r_const);
4123 tcg_temp_free(r_const);
4124 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4125 dc->npc = DYNAMIC_PC;
4127 goto jmp_insn;
4128 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4129 case 0x39: /* rett, V9 return */
4131 TCGv r_const;
4133 if (!supervisor(dc))
4134 goto priv_insn;
4135 gen_mov_pc_npc(dc, cpu_cond);
4136 r_const = tcg_const_i32(3);
4137 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4138 r_const);
4139 tcg_temp_free(r_const);
4140 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4141 dc->npc = DYNAMIC_PC;
4142 tcg_gen_helper_0_0(helper_rett);
4144 goto jmp_insn;
4145 #endif
4146 case 0x3b: /* flush */
4147 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4148 goto unimp_flush;
4149 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4150 break;
4151 case 0x3c: /* save */
4152 save_state(dc, cpu_cond);
4153 tcg_gen_helper_0_0(helper_save);
4154 gen_movl_TN_reg(rd, cpu_dst);
4155 break;
4156 case 0x3d: /* restore */
4157 save_state(dc, cpu_cond);
4158 tcg_gen_helper_0_0(helper_restore);
4159 gen_movl_TN_reg(rd, cpu_dst);
4160 break;
4161 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4162 case 0x3e: /* V9 done/retry */
4164 switch (rd) {
4165 case 0:
4166 if (!supervisor(dc))
4167 goto priv_insn;
4168 dc->npc = DYNAMIC_PC;
4169 dc->pc = DYNAMIC_PC;
4170 tcg_gen_helper_0_0(helper_done);
4171 goto jmp_insn;
4172 case 1:
4173 if (!supervisor(dc))
4174 goto priv_insn;
4175 dc->npc = DYNAMIC_PC;
4176 dc->pc = DYNAMIC_PC;
4177 tcg_gen_helper_0_0(helper_retry);
4178 goto jmp_insn;
4179 default:
4180 goto illegal_insn;
4183 break;
4184 #endif
4185 default:
4186 goto illegal_insn;
4189 break;
4191 break;
4192 case 3: /* load/store instructions */
4194 unsigned int xop = GET_FIELD(insn, 7, 12);
4196 cpu_src1 = get_src1(insn, cpu_src1);
4197 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4198 rs2 = GET_FIELD(insn, 27, 31);
4199 gen_movl_reg_TN(rs2, cpu_src2);
4200 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4201 } else if (IS_IMM) { /* immediate */
4202 rs2 = GET_FIELDs(insn, 19, 31);
4203 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4204 } else { /* register */
4205 rs2 = GET_FIELD(insn, 27, 31);
4206 if (rs2 != 0) {
4207 gen_movl_reg_TN(rs2, cpu_src2);
4208 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4209 } else
4210 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4212 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4213 (xop > 0x17 && xop <= 0x1d ) ||
4214 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4215 switch (xop) {
4216 case 0x0: /* load unsigned word */
4217 gen_address_mask(dc, cpu_addr);
4218 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4219 break;
4220 case 0x1: /* load unsigned byte */
4221 gen_address_mask(dc, cpu_addr);
4222 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4223 break;
4224 case 0x2: /* load unsigned halfword */
4225 gen_address_mask(dc, cpu_addr);
4226 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4227 break;
                case 0x3: /* load double word */
                    /* ldd: rd must name an even register of the pair.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           r_const); // XXX remove
                        tcg_temp_free(r_const);
                        gen_address_mask(dc, cpu_addr);
                        /* One 64-bit load, then split: low 32 bits go to
                           the odd register rd + 1 ...  */
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        /* ... and the high 32 bits are left in cpu_val for
                           the common gen_movl_TN_reg(rd, cpu_val) below.  */
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    break;
4249 case 0x9: /* load signed byte */
4250 gen_address_mask(dc, cpu_addr);
4251 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4252 break;
4253 case 0xa: /* load signed halfword */
4254 gen_address_mask(dc, cpu_addr);
4255 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4256 break;
4257 case 0xd: /* ldstub -- XXX: should be atomically */
4259 TCGv r_const;
4261 gen_address_mask(dc, cpu_addr);
4262 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4263 r_const = tcg_const_tl(0xff);
4264 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4265 tcg_temp_free(r_const);
4267 break;
4268 case 0x0f: /* swap register with memory. Also
4269 atomically */
4270 CHECK_IU_FEATURE(dc, SWAP);
4271 gen_movl_reg_TN(rd, cpu_val);
4272 gen_address_mask(dc, cpu_addr);
4273 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4274 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4275 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4276 break;
4277 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4278 case 0x10: /* load word alternate */
4279 #ifndef TARGET_SPARC64
4280 if (IS_IMM)
4281 goto illegal_insn;
4282 if (!supervisor(dc))
4283 goto priv_insn;
4284 #endif
4285 save_state(dc, cpu_cond);
4286 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4287 break;
4288 case 0x11: /* load unsigned byte alternate */
4289 #ifndef TARGET_SPARC64
4290 if (IS_IMM)
4291 goto illegal_insn;
4292 if (!supervisor(dc))
4293 goto priv_insn;
4294 #endif
4295 save_state(dc, cpu_cond);
4296 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4297 break;
4298 case 0x12: /* load unsigned halfword alternate */
4299 #ifndef TARGET_SPARC64
4300 if (IS_IMM)
4301 goto illegal_insn;
4302 if (!supervisor(dc))
4303 goto priv_insn;
4304 #endif
4305 save_state(dc, cpu_cond);
4306 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4307 break;
4308 case 0x13: /* load double word alternate */
4309 #ifndef TARGET_SPARC64
4310 if (IS_IMM)
4311 goto illegal_insn;
4312 if (!supervisor(dc))
4313 goto priv_insn;
4314 #endif
4315 if (rd & 1)
4316 goto illegal_insn;
4317 save_state(dc, cpu_cond);
4318 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4319 goto skip_move;
4320 case 0x19: /* load signed byte alternate */
4321 #ifndef TARGET_SPARC64
4322 if (IS_IMM)
4323 goto illegal_insn;
4324 if (!supervisor(dc))
4325 goto priv_insn;
4326 #endif
4327 save_state(dc, cpu_cond);
4328 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4329 break;
4330 case 0x1a: /* load signed halfword alternate */
4331 #ifndef TARGET_SPARC64
4332 if (IS_IMM)
4333 goto illegal_insn;
4334 if (!supervisor(dc))
4335 goto priv_insn;
4336 #endif
4337 save_state(dc, cpu_cond);
4338 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4339 break;
4340 case 0x1d: /* ldstuba -- XXX: should be atomically */
4341 #ifndef TARGET_SPARC64
4342 if (IS_IMM)
4343 goto illegal_insn;
4344 if (!supervisor(dc))
4345 goto priv_insn;
4346 #endif
4347 save_state(dc, cpu_cond);
4348 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4349 break;
4350 case 0x1f: /* swap reg with alt. memory. Also
4351 atomically */
4352 CHECK_IU_FEATURE(dc, SWAP);
4353 #ifndef TARGET_SPARC64
4354 if (IS_IMM)
4355 goto illegal_insn;
4356 if (!supervisor(dc))
4357 goto priv_insn;
4358 #endif
4359 save_state(dc, cpu_cond);
4360 gen_movl_reg_TN(rd, cpu_val);
4361 gen_swap_asi(cpu_val, cpu_addr, insn);
4362 break;
4364 #ifndef TARGET_SPARC64
4365 case 0x30: /* ldc */
4366 case 0x31: /* ldcsr */
4367 case 0x33: /* lddc */
4368 goto ncp_insn;
4369 #endif
4370 #endif
4371 #ifdef TARGET_SPARC64
4372 case 0x08: /* V9 ldsw */
4373 gen_address_mask(dc, cpu_addr);
4374 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4375 break;
4376 case 0x0b: /* V9 ldx */
4377 gen_address_mask(dc, cpu_addr);
4378 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4379 break;
4380 case 0x18: /* V9 ldswa */
4381 save_state(dc, cpu_cond);
4382 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4383 break;
4384 case 0x1b: /* V9 ldxa */
4385 save_state(dc, cpu_cond);
4386 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4387 break;
4388 case 0x2d: /* V9 prefetch, no effect */
4389 goto skip_move;
4390 case 0x30: /* V9 ldfa */
4391 save_state(dc, cpu_cond);
4392 gen_ldf_asi(cpu_addr, insn, 4, rd);
4393 goto skip_move;
4394 case 0x33: /* V9 lddfa */
4395 save_state(dc, cpu_cond);
4396 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4397 goto skip_move;
4398 case 0x3d: /* V9 prefetcha, no effect */
4399 goto skip_move;
4400 case 0x32: /* V9 ldqfa */
4401 CHECK_FPU_FEATURE(dc, FLOAT128);
4402 save_state(dc, cpu_cond);
4403 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4404 goto skip_move;
4405 #endif
4406 default:
4407 goto illegal_insn;
4409 gen_movl_TN_reg(rd, cpu_val);
4410 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4411 skip_move: ;
4412 #endif
4413 } else if (xop >= 0x20 && xop < 0x24) {
4414 if (gen_trap_ifnofpu(dc, cpu_cond))
4415 goto jmp_insn;
4416 save_state(dc, cpu_cond);
4417 switch (xop) {
4418 case 0x20: /* load fpreg */
4419 gen_address_mask(dc, cpu_addr);
4420 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4421 break;
4422 case 0x21: /* ldfsr, V9 ldxfsr */
4423 #ifdef TARGET_SPARC64
4424 gen_address_mask(dc, cpu_addr);
4425 if (rd == 1) {
4426 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4427 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4428 } else
4429 #else
4431 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4432 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4434 #endif
4435 break;
4436 case 0x22: /* load quad fpreg */
4438 TCGv r_const;
4440 CHECK_FPU_FEATURE(dc, FLOAT128);
4441 r_const = tcg_const_i32(dc->mem_idx);
4442 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4443 tcg_temp_free(r_const);
4444 gen_op_store_QT0_fpr(QFPREG(rd));
4446 break;
4447 case 0x23: /* load double fpreg */
4449 TCGv r_const;
4451 r_const = tcg_const_i32(dc->mem_idx);
4452 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4453 tcg_temp_free(r_const);
4454 gen_op_store_DT0_fpr(DFPREG(rd));
4456 break;
4457 default:
4458 goto illegal_insn;
4460 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4461 xop == 0xe || xop == 0x1e) {
4462 gen_movl_reg_TN(rd, cpu_val);
4463 switch (xop) {
4464 case 0x4: /* store word */
4465 gen_address_mask(dc, cpu_addr);
4466 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4467 break;
4468 case 0x5: /* store byte */
4469 gen_address_mask(dc, cpu_addr);
4470 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4471 break;
4472 case 0x6: /* store halfword */
4473 gen_address_mask(dc, cpu_addr);
4474 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4475 break;
                case 0x7: /* store double word */
                    /* std: rd must name an even register of the pair.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv r_low, r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           r_const); // XXX remove
                        tcg_temp_free(r_const);
                        /* Build the 64-bit value with concat_i32_i64:
                           rd + 1 (odd reg) supplies the low 32 bits,
                           cpu_val (rd, even reg) the high 32 bits, then
                           emit a single 64-bit store.  */
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        r_low = tcg_temp_new(TCG_TYPE_I32);
                        tcg_gen_trunc_tl_i32(r_low, cpu_tmp0);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_val);
                        tcg_gen_concat_i32_i64(cpu_tmp64, r_low, cpu_tmp32);
                        tcg_temp_free(r_low);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    break;
4497 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4498 case 0x14: /* store word alternate */
4499 #ifndef TARGET_SPARC64
4500 if (IS_IMM)
4501 goto illegal_insn;
4502 if (!supervisor(dc))
4503 goto priv_insn;
4504 #endif
4505 save_state(dc, cpu_cond);
4506 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4507 break;
4508 case 0x15: /* store byte alternate */
4509 #ifndef TARGET_SPARC64
4510 if (IS_IMM)
4511 goto illegal_insn;
4512 if (!supervisor(dc))
4513 goto priv_insn;
4514 #endif
4515 save_state(dc, cpu_cond);
4516 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4517 break;
4518 case 0x16: /* store halfword alternate */
4519 #ifndef TARGET_SPARC64
4520 if (IS_IMM)
4521 goto illegal_insn;
4522 if (!supervisor(dc))
4523 goto priv_insn;
4524 #endif
4525 save_state(dc, cpu_cond);
4526 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4527 break;
4528 case 0x17: /* store double word alternate */
4529 #ifndef TARGET_SPARC64
4530 if (IS_IMM)
4531 goto illegal_insn;
4532 if (!supervisor(dc))
4533 goto priv_insn;
4534 #endif
4535 if (rd & 1)
4536 goto illegal_insn;
4537 else {
4538 save_state(dc, cpu_cond);
4539 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4541 break;
4542 #endif
4543 #ifdef TARGET_SPARC64
4544 case 0x0e: /* V9 stx */
4545 gen_address_mask(dc, cpu_addr);
4546 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4547 break;
4548 case 0x1e: /* V9 stxa */
4549 save_state(dc, cpu_cond);
4550 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4551 break;
4552 #endif
4553 default:
4554 goto illegal_insn;
4556 } else if (xop > 0x23 && xop < 0x28) {
4557 if (gen_trap_ifnofpu(dc, cpu_cond))
4558 goto jmp_insn;
4559 save_state(dc, cpu_cond);
4560 switch (xop) {
4561 case 0x24: /* store fpreg */
4562 gen_address_mask(dc, cpu_addr);
4563 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4564 break;
4565 case 0x25: /* stfsr, V9 stxfsr */
4566 #ifdef TARGET_SPARC64
4567 gen_address_mask(dc, cpu_addr);
4568 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4569 if (rd == 1)
4570 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4571 else {
4572 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4573 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4575 #else
4576 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4577 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4578 #endif
4579 break;
4580 case 0x26:
4581 #ifdef TARGET_SPARC64
4582 /* V9 stqf, store quad fpreg */
4584 TCGv r_const;
4586 CHECK_FPU_FEATURE(dc, FLOAT128);
4587 gen_op_load_fpr_QT0(QFPREG(rd));
4588 r_const = tcg_const_i32(dc->mem_idx);
4589 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4590 tcg_temp_free(r_const);
4592 break;
4593 #else /* !TARGET_SPARC64 */
4594 /* stdfq, store floating point queue */
4595 #if defined(CONFIG_USER_ONLY)
4596 goto illegal_insn;
4597 #else
4598 if (!supervisor(dc))
4599 goto priv_insn;
4600 if (gen_trap_ifnofpu(dc, cpu_cond))
4601 goto jmp_insn;
4602 goto nfq_insn;
4603 #endif
4604 #endif
4605 case 0x27: /* store double fpreg */
4607 TCGv r_const;
4609 gen_op_load_fpr_DT0(DFPREG(rd));
4610 r_const = tcg_const_i32(dc->mem_idx);
4611 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4612 tcg_temp_free(r_const);
4614 break;
4615 default:
4616 goto illegal_insn;
4618 } else if (xop > 0x33 && xop < 0x3f) {
4619 save_state(dc, cpu_cond);
4620 switch (xop) {
4621 #ifdef TARGET_SPARC64
4622 case 0x34: /* V9 stfa */
4623 gen_stf_asi(cpu_addr, insn, 4, rd);
4624 break;
4625 case 0x36: /* V9 stqfa */
4627 TCGv r_const;
4629 CHECK_FPU_FEATURE(dc, FLOAT128);
4630 r_const = tcg_const_i32(7);
4631 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4632 r_const);
4633 tcg_temp_free(r_const);
4634 gen_op_load_fpr_QT0(QFPREG(rd));
4635 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4637 break;
4638 case 0x37: /* V9 stdfa */
4639 gen_op_load_fpr_DT0(DFPREG(rd));
4640 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4641 break;
4642 case 0x3c: /* V9 casa */
4643 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4644 gen_movl_TN_reg(rd, cpu_val);
4645 break;
4646 case 0x3e: /* V9 casxa */
4647 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4648 gen_movl_TN_reg(rd, cpu_val);
4649 break;
4650 #else
4651 case 0x34: /* stc */
4652 case 0x35: /* stcsr */
4653 case 0x36: /* stdcq */
4654 case 0x37: /* stdc */
4655 goto ncp_insn;
4656 #endif
4657 default:
4658 goto illegal_insn;
4661 else
4662 goto illegal_insn;
4664 break;
4666 /* default case for non jump instructions */
4667 if (dc->npc == DYNAMIC_PC) {
4668 dc->pc = DYNAMIC_PC;
4669 gen_op_next_insn();
4670 } else if (dc->npc == JUMP_PC) {
4671 /* we can do a static jump */
4672 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4673 dc->is_br = 1;
4674 } else {
4675 dc->pc = dc->npc;
4676 dc->npc = dc->npc + 4;
4678 jmp_insn:
4679 return;
4680 illegal_insn:
4682 TCGv r_const;
4684 save_state(dc, cpu_cond);
4685 r_const = tcg_const_i32(TT_ILL_INSN);
4686 tcg_gen_helper_0_1(raise_exception, r_const);
4687 tcg_temp_free(r_const);
4688 dc->is_br = 1;
4690 return;
4691 unimp_flush:
4693 TCGv r_const;
4695 save_state(dc, cpu_cond);
4696 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4697 tcg_gen_helper_0_1(raise_exception, r_const);
4698 tcg_temp_free(r_const);
4699 dc->is_br = 1;
4701 return;
4702 #if !defined(CONFIG_USER_ONLY)
4703 priv_insn:
4705 TCGv r_const;
4707 save_state(dc, cpu_cond);
4708 r_const = tcg_const_i32(TT_PRIV_INSN);
4709 tcg_gen_helper_0_1(raise_exception, r_const);
4710 tcg_temp_free(r_const);
4711 dc->is_br = 1;
4713 return;
4714 #endif
4715 nfpu_insn:
4716 save_state(dc, cpu_cond);
4717 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4718 dc->is_br = 1;
4719 return;
4720 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4721 nfq_insn:
4722 save_state(dc, cpu_cond);
4723 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4724 dc->is_br = 1;
4725 return;
4726 #endif
4727 #ifndef TARGET_SPARC64
4728 ncp_insn:
4730 TCGv r_const;
4732 save_state(dc, cpu_cond);
4733 r_const = tcg_const_i32(TT_NCP_INSN);
4734 tcg_gen_helper_0_1(raise_exception, r_const);
4735 tcg_temp_free(r_const);
4736 dc->is_br = 1;
4738 return;
4739 #endif
/* Translate one SPARC basic block starting at tb->pc into TCG ops.
   When spc is non-zero, per-instruction pc/npc tables (gen_opc_pc,
   gen_opc_npc, gen_opc_instr_start, gen_opc_icount) are filled so that
   CPU state can later be restored for a searched host pc. */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* The block's entry npc is carried in cs_base (SPARC delay slots). */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PS_AM: 32-bit address masking mode for V9 */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by the per-insn translators. */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);

    // loads and stores
    cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
    cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* If a debugger breakpoint is set at this pc, emit a debug
           exception and stop translating the block here. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record the pc/npc of every generated op for later
               host-pc -> guest-pc restoration. */
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        /* icount: the last insn of an I/O-ending TB must run with
           io access enabled. */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the shared temporaries allocated above (reverse order). */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free(cpu_tmp64);
    tcg_temp_free(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the instr_start table up to the last generated op. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        /* Save the two possible npc targets of a conditional delay
           slot so gen_pc_load() can pick the right one. */
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
}
/* Public entry point: translate a TB for execution (no search-pc
   bookkeeping; spc = 0). */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
/* Public entry point: re-translate a TB while recording per-insn
   pc/npc tables (spc = 1) so a host pc can be mapped back to guest
   state, see gen_pc_load(). */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
/* One-time initialization of the translator: register the fixed TCG
   globals that mirror CPUState fields (condition codes, pc/npc, y,
   windowed registers, FP registers, ...) and register all helper
   functions.  Subsequent calls are no-ops thanks to 'inited'. */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env lives in the fixed host register TCG_AREG0; everything
           else is a memory-backed global at its CPUState offset. */
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC V9-only state: 64-bit condition codes, ASI, FP state
           register, and the various timer/hypervisor registers. */
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
        cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, asi),
                                     "asi");
        cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
                                      TCG_AREG0, offsetof(CPUState, fprs),
                                      "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                           TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                            TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                             TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                       offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                      offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                      offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
#else
        /* SPARC V8-only state: window invalid mask. */
        cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
                                   TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* Start at 1: %g0 is hardwired to zero and never registered
           (see gregnames[0] above). */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),
                                            fregnames[i]);

        /* register helpers */

#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
    }
}
5020 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5021 unsigned long searched_pc, int pc_pos, void *puc)
5023 target_ulong npc;
5024 env->pc = gen_opc_pc[pc_pos];
5025 npc = gen_opc_npc[pc_pos];
5026 if (npc == 1) {
5027 /* dynamic NPC: already stored */
5028 } else if (npc == 2) {
5029 target_ulong t2 = (target_ulong)(unsigned long)puc;
5030 /* jump PC: use T2 and the jump targets of the translation */
5031 if (t2)
5032 env->npc = gen_opc_jump_pc[0];
5033 else
5034 env->npc = gen_opc_jump_pc[1];
5035 } else {
5036 env->npc = npc;