Fix mulscc with high bits set in either src1 or src2
[qemu/mini2440.git] / target-sparc / translate.c
blobfa34d271333622213e8a014d85b915efc824fc31
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define DEBUG_DISAS
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
61 #include "gen-icount.h"
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
67 int is_br;
68 int mem_idx;
69 int fpu_enabled;
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
72 sparc_def_t *def;
73 } DisasContext;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
86 #ifdef TARGET_SPARC64
87 #define FFPREG(r) (r)
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
90 #else
91 #define FFPREG(r) (r)
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
94 #endif
/* Sign-extend the low 'len' bits of x (1 <= len <= 32) to a full int.
 *
 * The classic "(x << len) >> len" trick is undefined behavior in C when
 * the left shift overflows a signed int (e.g. any field whose sign bit
 * is set), so do the extension with unsigned masking instead:
 * isolate the field, then flip-and-subtract the sign bit, which yields
 * the two's-complement value without any UB-prone signed shifts.
 */
static int sign_extend(int x, int len)
{
    if (len >= 32) {
        /* Field already fills the int; nothing to extend. */
        return x;
    }
    uint32_t sign_bit = UINT32_C(1) << (len - 1);
    uint32_t field = (uint32_t)x & ((sign_bit << 1) - 1);
    /* (field ^ s) - s == field for positives, field - 2^len otherwise. */
    return (int)(((int64_t)(field ^ sign_bit)) - (int64_t)sign_bit);
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src)
107 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
108 offsetof(CPU_DoubleU, l.upper));
109 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
110 offsetof(CPU_DoubleU, l.lower));
113 static void gen_op_load_fpr_DT1(unsigned int src)
115 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
116 offsetof(CPU_DoubleU, l.upper));
117 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
118 offsetof(CPU_DoubleU, l.lower));
121 static void gen_op_store_DT0_fpr(unsigned int dst)
123 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
124 offsetof(CPU_DoubleU, l.upper));
125 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
126 offsetof(CPU_DoubleU, l.lower));
129 static void gen_op_load_fpr_QT0(unsigned int src)
131 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
132 offsetof(CPU_QuadU, l.upmost));
133 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
134 offsetof(CPU_QuadU, l.upper));
135 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
136 offsetof(CPU_QuadU, l.lower));
137 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
138 offsetof(CPU_QuadU, l.lowest));
141 static void gen_op_load_fpr_QT1(unsigned int src)
143 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
144 offsetof(CPU_QuadU, l.upmost));
145 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
146 offsetof(CPU_QuadU, l.upper));
147 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
148 offsetof(CPU_QuadU, l.lower));
149 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
150 offsetof(CPU_QuadU, l.lowest));
153 static void gen_op_store_QT0_fpr(unsigned int dst)
155 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
156 offsetof(CPU_QuadU, l.upmost));
157 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
158 offsetof(CPU_QuadU, l.upper));
159 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
160 offsetof(CPU_QuadU, l.lower));
161 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
162 offsetof(CPU_QuadU, l.lowest));
165 /* moves */
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
170 #endif
171 #else
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
175 #else
176 #endif
177 #endif
179 #ifdef TARGET_SPARC64
180 #ifndef TARGET_ABI32
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
182 #else
183 #define AM_CHECK(dc) (1)
184 #endif
185 #endif
187 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
189 #ifdef TARGET_SPARC64
190 if (AM_CHECK(dc))
191 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
192 #endif
195 static inline void gen_movl_reg_TN(int reg, TCGv tn)
197 if (reg == 0)
198 tcg_gen_movi_tl(tn, 0);
199 else if (reg < 8)
200 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
201 else {
202 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
206 static inline void gen_movl_TN_reg(int reg, TCGv tn)
208 if (reg == 0)
209 return;
210 else if (reg < 8)
211 tcg_gen_mov_tl(cpu_gregs[reg], tn);
212 else {
213 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217 static inline void gen_goto_tb(DisasContext *s, int tb_num,
218 target_ulong pc, target_ulong npc)
220 TranslationBlock *tb;
222 tb = s->tb;
223 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
224 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num);
227 tcg_gen_movi_tl(cpu_pc, pc);
228 tcg_gen_movi_tl(cpu_npc, npc);
229 tcg_gen_exit_tb((long)tb + tb_num);
230 } else {
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc, pc);
233 tcg_gen_movi_tl(cpu_npc, npc);
234 tcg_gen_exit_tb(0);
238 // XXX suboptimal
239 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
241 tcg_gen_extu_i32_tl(reg, src);
242 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
243 tcg_gen_andi_tl(reg, reg, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
248 tcg_gen_extu_i32_tl(reg, src);
249 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
250 tcg_gen_andi_tl(reg, reg, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
255 tcg_gen_extu_i32_tl(reg, src);
256 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
257 tcg_gen_andi_tl(reg, reg, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
262 tcg_gen_extu_i32_tl(reg, src);
263 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
264 tcg_gen_andi_tl(reg, reg, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
/* Clear all xcc (64-bit) condition codes before recomputing them. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
279 /* old op:
280 if (!T0)
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
283 env->psr |= PSR_NEG;
285 static inline void gen_cc_NZ_icc(TCGv dst)
287 TCGv r_temp;
288 int l1, l2;
290 l1 = gen_new_label();
291 l2 = gen_new_label();
292 r_temp = tcg_temp_new(TCG_TYPE_TL);
293 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
294 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
295 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
296 gen_set_label(l1);
297 tcg_gen_ext_i32_tl(r_temp, dst);
298 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
299 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
300 gen_set_label(l2);
301 tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
/* Set the xcc N and Z flags from the full 64-bit dst. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
320 /* old op:
321 if (T0 < src1)
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
326 TCGv r_temp1, r_temp2;
327 int l1;
329 l1 = gen_new_label();
330 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
331 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
332 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
333 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
334 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
335 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
336 gen_set_label(l1);
337 tcg_temp_free(r_temp1);
338 tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Set the xcc carry flag after a 64-bit add (sum unsigned-below operand). */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
353 /* old op:
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
355 env->psr |= PSR_OVF;
357 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
359 TCGv r_temp;
361 r_temp = tcg_temp_new(TCG_TYPE_TL);
362 tcg_gen_xor_tl(r_temp, src1, src2);
363 tcg_gen_xori_tl(r_temp, r_temp, -1);
364 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
365 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
366 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
368 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
369 tcg_temp_free(r_temp);
370 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
#ifdef TARGET_SPARC64
/* Set the xcc overflow flag after an add, testing bit 63 instead of 31. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
391 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
393 TCGv r_temp, r_const;
394 int l1;
396 l1 = gen_new_label();
398 r_temp = tcg_temp_new(TCG_TYPE_TL);
399 tcg_gen_xor_tl(r_temp, src1, src2);
400 tcg_gen_xori_tl(r_temp, r_temp, -1);
401 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
402 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
403 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
405 r_const = tcg_const_i32(TT_TOVF);
406 tcg_gen_helper_0_1(raise_exception, r_const);
407 tcg_temp_free(r_const);
408 gen_set_label(l1);
409 tcg_temp_free(r_temp);
412 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
414 int l1;
416 l1 = gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0, src1, src2);
418 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
420 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
421 gen_set_label(l1);
424 static inline void gen_tag_tv(TCGv src1, TCGv src2)
426 int l1;
427 TCGv r_const;
429 l1 = gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0, src1, src2);
431 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
433 r_const = tcg_const_i32(TT_TOVF);
434 tcg_gen_helper_0_1(raise_exception, r_const);
435 tcg_temp_free(r_const);
436 gen_set_label(l1);
439 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
441 tcg_gen_mov_tl(cpu_cc_src, src1);
442 tcg_gen_mov_tl(cpu_cc_src2, src2);
443 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
444 gen_cc_clear_icc();
445 gen_cc_NZ_icc(cpu_cc_dst);
446 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
447 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 #ifdef TARGET_SPARC64
449 gen_cc_clear_xcc();
450 gen_cc_NZ_xcc(cpu_cc_dst);
451 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
452 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
453 #endif
454 tcg_gen_mov_tl(dst, cpu_cc_dst);
457 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
459 tcg_gen_mov_tl(cpu_cc_src, src1);
460 tcg_gen_mov_tl(cpu_cc_src2, src2);
461 gen_mov_reg_C(cpu_tmp0, cpu_psr);
462 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
463 gen_cc_clear_icc();
464 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
465 #ifdef TARGET_SPARC64
466 gen_cc_clear_xcc();
467 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
468 #endif
469 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
470 gen_cc_NZ_icc(cpu_cc_dst);
471 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
472 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst);
475 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
476 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477 #endif
478 tcg_gen_mov_tl(dst, cpu_cc_dst);
481 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
483 tcg_gen_mov_tl(cpu_cc_src, src1);
484 tcg_gen_mov_tl(cpu_cc_src2, src2);
485 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
486 gen_cc_clear_icc();
487 gen_cc_NZ_icc(cpu_cc_dst);
488 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
489 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
490 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
491 #ifdef TARGET_SPARC64
492 gen_cc_clear_xcc();
493 gen_cc_NZ_xcc(cpu_cc_dst);
494 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
495 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
496 #endif
497 tcg_gen_mov_tl(dst, cpu_cc_dst);
500 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
502 tcg_gen_mov_tl(cpu_cc_src, src1);
503 tcg_gen_mov_tl(cpu_cc_src2, src2);
504 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
505 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
506 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
507 gen_cc_clear_icc();
508 gen_cc_NZ_icc(cpu_cc_dst);
509 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
510 #ifdef TARGET_SPARC64
511 gen_cc_clear_xcc();
512 gen_cc_NZ_xcc(cpu_cc_dst);
513 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
514 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
515 #endif
516 tcg_gen_mov_tl(dst, cpu_cc_dst);
519 /* old op:
520 if (src1 < T1)
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
525 TCGv r_temp1, r_temp2;
526 int l1;
528 l1 = gen_new_label();
529 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
530 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
531 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
532 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
533 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
534 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
535 gen_set_label(l1);
536 tcg_temp_free(r_temp1);
537 tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Set the xcc borrow flag after a 64-bit subtract (src1 < src2 unsigned). */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
552 /* old op:
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
554 env->psr |= PSR_OVF;
556 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
558 TCGv r_temp;
560 r_temp = tcg_temp_new(TCG_TYPE_TL);
561 tcg_gen_xor_tl(r_temp, src1, src2);
562 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
563 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
564 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
566 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
567 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
568 tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
/* Set the xcc overflow flag after a subtract, testing bit 63. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
588 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
590 TCGv r_temp, r_const;
591 int l1;
593 l1 = gen_new_label();
595 r_temp = tcg_temp_new(TCG_TYPE_TL);
596 tcg_gen_xor_tl(r_temp, src1, src2);
597 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
598 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
599 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
601 r_const = tcg_const_i32(TT_TOVF);
602 tcg_gen_helper_0_1(raise_exception, r_const);
603 tcg_temp_free(r_const);
604 gen_set_label(l1);
605 tcg_temp_free(r_temp);
608 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
610 tcg_gen_mov_tl(cpu_cc_src, src1);
611 tcg_gen_mov_tl(cpu_cc_src2, src2);
612 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
613 gen_cc_clear_icc();
614 gen_cc_NZ_icc(cpu_cc_dst);
615 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
616 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
617 #ifdef TARGET_SPARC64
618 gen_cc_clear_xcc();
619 gen_cc_NZ_xcc(cpu_cc_dst);
620 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
621 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
622 #endif
623 tcg_gen_mov_tl(dst, cpu_cc_dst);
626 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
628 tcg_gen_mov_tl(cpu_cc_src, src1);
629 tcg_gen_mov_tl(cpu_cc_src2, src2);
630 gen_mov_reg_C(cpu_tmp0, cpu_psr);
631 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
632 gen_cc_clear_icc();
633 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
634 #ifdef TARGET_SPARC64
635 gen_cc_clear_xcc();
636 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
637 #endif
638 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
639 gen_cc_NZ_icc(cpu_cc_dst);
640 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
641 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst);
644 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
645 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
646 #endif
647 tcg_gen_mov_tl(dst, cpu_cc_dst);
650 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
652 tcg_gen_mov_tl(cpu_cc_src, src1);
653 tcg_gen_mov_tl(cpu_cc_src2, src2);
654 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
655 gen_cc_clear_icc();
656 gen_cc_NZ_icc(cpu_cc_dst);
657 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
658 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
660 #ifdef TARGET_SPARC64
661 gen_cc_clear_xcc();
662 gen_cc_NZ_xcc(cpu_cc_dst);
663 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
664 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
665 #endif
666 tcg_gen_mov_tl(dst, cpu_cc_dst);
669 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
671 tcg_gen_mov_tl(cpu_cc_src, src1);
672 tcg_gen_mov_tl(cpu_cc_src2, src2);
673 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
674 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
675 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
676 gen_cc_clear_icc();
677 gen_cc_NZ_icc(cpu_cc_dst);
678 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
679 #ifdef TARGET_SPARC64
680 gen_cc_clear_xcc();
681 gen_cc_NZ_xcc(cpu_cc_dst);
682 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
683 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
684 #endif
685 tcg_gen_mov_tl(dst, cpu_cc_dst);
688 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
690 TCGv r_temp;
691 int l1;
693 l1 = gen_new_label();
694 r_temp = tcg_temp_new(TCG_TYPE_TL);
696 /* old op:
697 if (!(env->y & 1))
698 T1 = 0;
700 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
701 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
704 tcg_gen_movi_tl(cpu_cc_src2, 0);
705 gen_set_label(l1);
707 // b2 = T0 & 1;
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
710 tcg_gen_shli_tl(r_temp, r_temp, 31);
711 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
712 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
714 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
716 // b1 = N ^ V;
717 gen_mov_reg_N(cpu_tmp0, cpu_psr);
718 gen_mov_reg_V(r_temp, cpu_psr);
719 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
720 tcg_temp_free(r_temp);
722 // T0 = (b1 << 31) | (T0 >> 1);
723 // src1 = T0;
724 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
725 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
726 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
731 gen_cc_clear_icc();
732 gen_cc_NZ_icc(cpu_cc_dst);
733 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
734 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
735 tcg_gen_mov_tl(dst, cpu_cc_dst);
738 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
740 TCGv r_temp, r_temp2;
742 r_temp = tcg_temp_new(TCG_TYPE_I64);
743 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
745 tcg_gen_extu_i32_i64(r_temp, src2);
746 tcg_gen_extu_i32_i64(r_temp2, src1);
747 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
749 tcg_gen_shri_i64(r_temp, r_temp2, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
751 tcg_temp_free(r_temp);
752 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst, r_temp2);
755 #else
756 tcg_gen_trunc_i64_tl(dst, r_temp2);
757 #endif
758 tcg_temp_free(r_temp2);
761 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
763 TCGv r_temp, r_temp2;
765 r_temp = tcg_temp_new(TCG_TYPE_I64);
766 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
768 tcg_gen_ext_i32_i64(r_temp, src2);
769 tcg_gen_ext_i32_i64(r_temp2, src1);
770 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
772 tcg_gen_shri_i64(r_temp, r_temp2, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
774 tcg_temp_free(r_temp);
775 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst, r_temp2);
778 #else
779 tcg_gen_trunc_i64_tl(dst, r_temp2);
780 #endif
781 tcg_temp_free(r_temp2);
784 #ifdef TARGET_SPARC64
785 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
787 TCGv r_const;
788 int l1;
790 l1 = gen_new_label();
791 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
792 r_const = tcg_const_i32(TT_DIV_ZERO);
793 tcg_gen_helper_0_1(raise_exception, r_const);
794 tcg_temp_free(r_const);
795 gen_set_label(l1);
798 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
800 int l1, l2;
802 l1 = gen_new_label();
803 l2 = gen_new_label();
804 tcg_gen_mov_tl(cpu_cc_src, src1);
805 tcg_gen_mov_tl(cpu_cc_src2, src2);
806 gen_trap_ifdivzero_tl(cpu_cc_src2);
807 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
808 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
809 tcg_gen_movi_i64(dst, INT64_MIN);
810 tcg_gen_br(l2);
811 gen_set_label(l1);
812 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
813 gen_set_label(l2);
815 #endif
817 static inline void gen_op_div_cc(TCGv dst)
819 int l1;
821 tcg_gen_mov_tl(cpu_cc_dst, dst);
822 gen_cc_clear_icc();
823 gen_cc_NZ_icc(cpu_cc_dst);
824 l1 = gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
826 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
827 gen_set_label(l1);
830 static inline void gen_op_logic_cc(TCGv dst)
832 tcg_gen_mov_tl(cpu_cc_dst, dst);
834 gen_cc_clear_icc();
835 gen_cc_NZ_icc(cpu_cc_dst);
836 #ifdef TARGET_SPARC64
837 gen_cc_clear_xcc();
838 gen_cc_NZ_xcc(cpu_cc_dst);
839 #endif
842 // 1
843 static inline void gen_op_eval_ba(TCGv dst)
845 tcg_gen_movi_tl(dst, 1);
848 // Z
849 static inline void gen_op_eval_be(TCGv dst, TCGv src)
851 gen_mov_reg_Z(dst, src);
854 // Z | (N ^ V)
855 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
857 gen_mov_reg_N(cpu_tmp0, src);
858 gen_mov_reg_V(dst, src);
859 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
860 gen_mov_reg_Z(cpu_tmp0, src);
861 tcg_gen_or_tl(dst, dst, cpu_tmp0);
864 // N ^ V
865 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
867 gen_mov_reg_V(cpu_tmp0, src);
868 gen_mov_reg_N(dst, src);
869 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
872 // C | Z
873 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
875 gen_mov_reg_Z(cpu_tmp0, src);
876 gen_mov_reg_C(dst, src);
877 tcg_gen_or_tl(dst, dst, cpu_tmp0);
880 // C
881 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
883 gen_mov_reg_C(dst, src);
886 // V
887 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
889 gen_mov_reg_V(dst, src);
892 // 0
893 static inline void gen_op_eval_bn(TCGv dst)
895 tcg_gen_movi_tl(dst, 0);
898 // N
899 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
901 gen_mov_reg_N(dst, src);
904 // !Z
905 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
907 gen_mov_reg_Z(dst, src);
908 tcg_gen_xori_tl(dst, dst, 0x1);
911 // !(Z | (N ^ V))
912 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
914 gen_mov_reg_N(cpu_tmp0, src);
915 gen_mov_reg_V(dst, src);
916 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
917 gen_mov_reg_Z(cpu_tmp0, src);
918 tcg_gen_or_tl(dst, dst, cpu_tmp0);
919 tcg_gen_xori_tl(dst, dst, 0x1);
922 // !(N ^ V)
923 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
925 gen_mov_reg_V(cpu_tmp0, src);
926 gen_mov_reg_N(dst, src);
927 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
928 tcg_gen_xori_tl(dst, dst, 0x1);
931 // !(C | Z)
932 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
934 gen_mov_reg_Z(cpu_tmp0, src);
935 gen_mov_reg_C(dst, src);
936 tcg_gen_or_tl(dst, dst, cpu_tmp0);
937 tcg_gen_xori_tl(dst, dst, 0x1);
940 // !C
941 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
943 gen_mov_reg_C(dst, src);
944 tcg_gen_xori_tl(dst, dst, 0x1);
947 // !N
948 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
950 gen_mov_reg_N(dst, src);
951 tcg_gen_xori_tl(dst, dst, 0x1);
954 // !V
955 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
957 gen_mov_reg_V(dst, src);
958 tcg_gen_xori_tl(dst, dst, 0x1);
962 FPSR bit field FCC1 | FCC0:
966 3 unordered
968 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
969 unsigned int fcc_offset)
971 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
972 tcg_gen_andi_tl(reg, reg, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
976 unsigned int fcc_offset)
978 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
979 tcg_gen_andi_tl(reg, reg, 0x1);
982 // !0: FCC0 | FCC1
983 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
984 unsigned int fcc_offset)
986 gen_mov_reg_FCC0(dst, src, fcc_offset);
987 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
988 tcg_gen_or_tl(dst, dst, cpu_tmp0);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
993 unsigned int fcc_offset)
995 gen_mov_reg_FCC0(dst, src, fcc_offset);
996 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
997 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1000 // 1 or 3: FCC0
1001 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1002 unsigned int fcc_offset)
1004 gen_mov_reg_FCC0(dst, src, fcc_offset);
1007 // 1: FCC0 & !FCC1
1008 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1009 unsigned int fcc_offset)
1011 gen_mov_reg_FCC0(dst, src, fcc_offset);
1012 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1013 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1014 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017 // 2 or 3: FCC1
1018 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1019 unsigned int fcc_offset)
1021 gen_mov_reg_FCC1(dst, src, fcc_offset);
1024 // 2: !FCC0 & FCC1
1025 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1026 unsigned int fcc_offset)
1028 gen_mov_reg_FCC0(dst, src, fcc_offset);
1029 tcg_gen_xori_tl(dst, dst, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1031 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1034 // 3: FCC0 & FCC1
1035 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1036 unsigned int fcc_offset)
1038 gen_mov_reg_FCC0(dst, src, fcc_offset);
1039 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1040 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1045 unsigned int fcc_offset)
1047 gen_mov_reg_FCC0(dst, src, fcc_offset);
1048 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1049 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1050 tcg_gen_xori_tl(dst, dst, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1055 unsigned int fcc_offset)
1057 gen_mov_reg_FCC0(dst, src, fcc_offset);
1058 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1059 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1060 tcg_gen_xori_tl(dst, dst, 0x1);
1063 // 0 or 2: !FCC0
1064 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1065 unsigned int fcc_offset)
1067 gen_mov_reg_FCC0(dst, src, fcc_offset);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1073 unsigned int fcc_offset)
1075 gen_mov_reg_FCC0(dst, src, fcc_offset);
1076 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1077 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1078 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1079 tcg_gen_xori_tl(dst, dst, 0x1);
1082 // 0 or 1: !FCC1
1083 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1084 unsigned int fcc_offset)
1086 gen_mov_reg_FCC1(dst, src, fcc_offset);
1087 tcg_gen_xori_tl(dst, dst, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1092 unsigned int fcc_offset)
1094 gen_mov_reg_FCC0(dst, src, fcc_offset);
1095 tcg_gen_xori_tl(dst, dst, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1097 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1098 tcg_gen_xori_tl(dst, dst, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1103 unsigned int fcc_offset)
1105 gen_mov_reg_FCC0(dst, src, fcc_offset);
1106 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1107 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1111 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1112 target_ulong pc2, TCGv r_cond)
1114 int l1;
1116 l1 = gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1120 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1122 gen_set_label(l1);
1123 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1126 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1127 target_ulong pc2, TCGv r_cond)
1129 int l1;
1131 l1 = gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1135 gen_goto_tb(dc, 0, pc2, pc1);
1137 gen_set_label(l1);
1138 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1141 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1142 TCGv r_cond)
1144 int l1, l2;
1146 l1 = gen_new_label();
1147 l2 = gen_new_label();
1149 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1151 tcg_gen_movi_tl(cpu_npc, npc1);
1152 tcg_gen_br(l2);
1154 gen_set_label(l1);
1155 tcg_gen_movi_tl(cpu_npc, npc2);
1156 gen_set_label(l2);
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    /* A JUMP_PC npc still depends on `cond`; materialize the npc
       selection now so `cond` can be overwritten. */
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
/* Commit the translation-time npc into cpu_npc.  A pending JUMP_PC is
   first resolved through `cond`; a DYNAMIC_PC is already in cpu_npc. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
/* Write both pc and npc back to the CPU state, e.g. before a helper
   that may raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
/* Advance pc to the current npc (used before delayed control transfers).
   When npc is only known at runtime, copy cpu_npc into cpu_pc and mark
   pc dynamic; otherwise just step the translation-time values. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
/* Emit the generic instruction step: pc <- npc, npc <- npc + 4.
   Order matters: cpu_pc must take the old cpu_npc before it advances. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
/* Evaluate an integer condition-code branch condition (Bicc/BPcc `cond`
   field, 0x0..0xf) into r_dst, reading the icc flags (cpu_psr) or, on
   sparc64 when cc != 0, the xcc flags (cpu_xcc). */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* One case per SPARC branch condition encoding; 0x8..0xf are the
       logical negations of 0x0..0x7. */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate an FP condition-code branch condition into r_dst from the
   fcc field selected by cc (0..3).  `offset` is the bit distance of the
   selected fcc field relative to fcc0 inside cpu_fsr — presumably the
   gen_mov_reg_FCC* helpers add the fcc0 base shift of 10, hence the
   "32 - 10" style constants; TODO confirm against those helpers. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* One case per SPARC FP branch condition encoding (FBfcc). */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
/* Map the V9 register-branch condition encodings (BPr/MOVr `rcond`,
   values 1..3 and 5..7) to the TCG condition that makes the branch
   FALL THROUGH, hence "inverted".
   NOTE(review): the entries at indices 0 and 4 were lost in extraction;
   they are unused (reserved) encodings, reconstructed here as -1 —
   confirm against the upstream file. */
static const int gen_tcg_cond_reg[8] = {
    -1,           /* reserved encoding */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,           /* reserved encoding */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* r_dst <- (r_src matches `cond`) ? 1 : 0, using the inverted table:
   branch over the "1" store when the condition does NOT hold. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  `offset` is the
   pre-scaled, sign-extended displacement; `a` is the annul bit. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: jump straight to the target */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        /* conditional: evaluate flags at runtime */
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer npc choice until the delay slot is translated */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
/* Translate an FP conditional branch (FBfcc/FBPfcc); identical control
   flow to do_branch but the condition comes from the fcc fields. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register (BPr): compare r_reg against zero
   per the rcond field and branch to dc->pc + offset. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer npc choice until the delay slot is translated */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
/* fcmp helper dispatch tables, indexed by the destination fcc field
   (0 = fcc0 ... 3 = fcc3). */
static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};

/* "e" variants signal an exception on unordered operands. */
static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
/* Single-precision FP compare into the fcc field chosen by fccno. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
        break;
    }
}
/* Double-precision FP compare; operands are in the DT0/DT1 helper
   registers, loaded by the caller. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

/* Quad-precision FP compare; operands are in QT0/QT1. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
/* Single-precision FP compare, exception-on-unordered variant. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
        break;
    }
}
/* Double-precision exception-on-unordered compare (operands in DT0/DT1). */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

/* Quad-precision exception-on-unordered compare (operands in QT0/QT1). */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
#else

/* Pre-V9 targets have a single fcc field, so fccno is ignored and the
   base helpers are called directly. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif
/* Raise an FP exception: replace the FSR ftt field with fsr_flags and
   trap with TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
}
/* In system emulation, raise TT_NFPU_INSN when the FPU is disabled.
   Returns non-zero when the trap was emitted (caller must stop
   translating the insn); always 0 for user-only builds. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv r_const;

        /* pc/npc must be committed before raising the exception */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
/* Clear the FSR ftt and current-exception (cexc) fields. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

/* Reset the softfloat accrued exception flags before an FP op. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
/* asi moves */
#ifdef TARGET_SPARC64
/* Return the ASI for an alternate-space access: the %asi register for
   the immediate (register-indirect) form, otherwise the asi field of
   the instruction.  r_addr is currently unused.  Caller frees the
   returned temporary. */
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
/* dst <- load of `size` bytes from alternate space at addr, optionally
   sign-extended. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* Store `size` bytes of src to alternate space at addr. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* FP load from alternate space; the helper writes FP register rd
   directly. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* FP store to alternate space; the helper reads FP register rd
   directly. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* SWAPA: atomically (at translation granularity) exchange dst with the
   32-bit word at addr in the alternate space — load the old word, store
   the old dst, then move the loaded word into dst. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* LDDA: 64-bit doubleword load from alternate space; the helper writes
   the rd/rd+1 register pair itself, so `hi` is unused here. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_asi);
}
/* STDA: pack the rd (hi) / rd+1 (lo) pair into a 64-bit value and store
   it to the alternate space. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    tcg_temp_free(r_temp);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* CASA: 32-bit compare-and-swap in the alternate space; compares the
   value in register rd against memory, swaps with val2 on match. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val1);
}
/* CASXA: 64-bit compare-and-swap variant of gen_cas_asi. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

/* 32-bit system emulation: ASI comes only from the insn's immediate
   field; the helper returns an i64 that is truncated to target width. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* Store `size` bytes of src to the alternate space (src widened to i64
   for the helper). */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* SWAPA: exchange dst with the 32-bit word at addr — load old word,
   store old dst, then dst <- loaded word. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* LDDA: load a 64-bit doubleword and split it across the register pair:
   low word into rd+1, high word into rd (and into `hi`). */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
/* STDA: pack rd (hi) / rd+1 (lo) into 64 bits and store to the
   alternate space. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    tcg_temp_free(r_temp);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at addr into dst, then store 0xff there
   (SPARC atomic load-store-unsigned-byte semantics). */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_val, r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val);
}
#endif
/* Return a TCGv holding source register rs1 of insn: constant 0 for
   %g0, the global TCGv for %g1-%g7, otherwise the value is loaded from
   the register window into `def` and `def` is returned. */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
/* Return a TCGv holding the second source operand of insn: the
   sign-extended simm13 for the immediate form, otherwise register rs2
   resolved as in get_src1. */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        /* cast to int keeps the sign extension done by GET_FIELDs */
        r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
/* Bail out of disas_sparc_insn when the CPU model lacks FEATURE:
   integer-unit features trap as illegal, FPU features as no-FPU. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1893 /* before an instruction, dc->pc must be static */
1894 static void disas_sparc_insn(DisasContext * dc)
1896 unsigned int insn, opc, rs1, rs2, rd;
1898 if (unlikely(loglevel & CPU_LOG_TB_OP))
1899 tcg_gen_debug_insn_start(dc->pc);
1900 insn = ldl_code(dc->pc);
1901 opc = GET_FIELD(insn, 0, 1);
1903 rd = GET_FIELD(insn, 2, 6);
1905 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1906 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1908 switch (opc) {
1909 case 0: /* branches/sethi */
1911 unsigned int xop = GET_FIELD(insn, 7, 9);
1912 int32_t target;
1913 switch (xop) {
1914 #ifdef TARGET_SPARC64
1915 case 0x1: /* V9 BPcc */
1917 int cc;
1919 target = GET_FIELD_SP(insn, 0, 18);
1920 target = sign_extend(target, 18);
1921 target <<= 2;
1922 cc = GET_FIELD_SP(insn, 20, 21);
1923 if (cc == 0)
1924 do_branch(dc, target, insn, 0, cpu_cond);
1925 else if (cc == 2)
1926 do_branch(dc, target, insn, 1, cpu_cond);
1927 else
1928 goto illegal_insn;
1929 goto jmp_insn;
1931 case 0x3: /* V9 BPr */
1933 target = GET_FIELD_SP(insn, 0, 13) |
1934 (GET_FIELD_SP(insn, 20, 21) << 14);
1935 target = sign_extend(target, 16);
1936 target <<= 2;
1937 cpu_src1 = get_src1(insn, cpu_src1);
1938 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1939 goto jmp_insn;
1941 case 0x5: /* V9 FBPcc */
1943 int cc = GET_FIELD_SP(insn, 20, 21);
1944 if (gen_trap_ifnofpu(dc, cpu_cond))
1945 goto jmp_insn;
1946 target = GET_FIELD_SP(insn, 0, 18);
1947 target = sign_extend(target, 19);
1948 target <<= 2;
1949 do_fbranch(dc, target, insn, cc, cpu_cond);
1950 goto jmp_insn;
1952 #else
1953 case 0x7: /* CBN+x */
1955 goto ncp_insn;
1957 #endif
1958 case 0x2: /* BN+x */
1960 target = GET_FIELD(insn, 10, 31);
1961 target = sign_extend(target, 22);
1962 target <<= 2;
1963 do_branch(dc, target, insn, 0, cpu_cond);
1964 goto jmp_insn;
1966 case 0x6: /* FBN+x */
1968 if (gen_trap_ifnofpu(dc, cpu_cond))
1969 goto jmp_insn;
1970 target = GET_FIELD(insn, 10, 31);
1971 target = sign_extend(target, 22);
1972 target <<= 2;
1973 do_fbranch(dc, target, insn, 0, cpu_cond);
1974 goto jmp_insn;
1976 case 0x4: /* SETHI */
1977 if (rd) { // nop
1978 uint32_t value = GET_FIELD(insn, 10, 31);
1979 TCGv r_const;
1981 r_const = tcg_const_tl(value << 10);
1982 gen_movl_TN_reg(rd, r_const);
1983 tcg_temp_free(r_const);
1985 break;
1986 case 0x0: /* UNIMPL */
1987 default:
1988 goto illegal_insn;
1990 break;
1992 break;
1993 case 1:
1994 /*CALL*/ {
1995 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1996 TCGv r_const;
1998 r_const = tcg_const_tl(dc->pc);
1999 gen_movl_TN_reg(15, r_const);
2000 tcg_temp_free(r_const);
2001 target += dc->pc;
2002 gen_mov_pc_npc(dc, cpu_cond);
2003 dc->npc = target;
2005 goto jmp_insn;
2006 case 2: /* FPU & Logical Operations */
2008 unsigned int xop = GET_FIELD(insn, 7, 12);
2009 if (xop == 0x3a) { /* generate trap */
2010 int cond;
2012 cpu_src1 = get_src1(insn, cpu_src1);
2013 if (IS_IMM) {
2014 rs2 = GET_FIELD(insn, 25, 31);
2015 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2016 } else {
2017 rs2 = GET_FIELD(insn, 27, 31);
2018 if (rs2 != 0) {
2019 gen_movl_reg_TN(rs2, cpu_src2);
2020 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2021 } else
2022 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2024 cond = GET_FIELD(insn, 3, 6);
2025 if (cond == 0x8) {
2026 save_state(dc, cpu_cond);
2027 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2028 } else if (cond != 0) {
2029 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2030 #ifdef TARGET_SPARC64
2031 /* V9 icc/xcc */
2032 int cc = GET_FIELD_SP(insn, 11, 12);
2034 save_state(dc, cpu_cond);
2035 if (cc == 0)
2036 gen_cond(r_cond, 0, cond);
2037 else if (cc == 2)
2038 gen_cond(r_cond, 1, cond);
2039 else
2040 goto illegal_insn;
2041 #else
2042 save_state(dc, cpu_cond);
2043 gen_cond(r_cond, 0, cond);
2044 #endif
2045 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2046 tcg_temp_free(r_cond);
2048 gen_op_next_insn();
2049 tcg_gen_exit_tb(0);
2050 dc->is_br = 1;
2051 goto jmp_insn;
2052 } else if (xop == 0x28) {
2053 rs1 = GET_FIELD(insn, 13, 17);
2054 switch(rs1) {
2055 case 0: /* rdy */
2056 #ifndef TARGET_SPARC64
2057 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2058 manual, rdy on the microSPARC
2059 II */
2060 case 0x0f: /* stbar in the SPARCv8 manual,
2061 rdy on the microSPARC II */
2062 case 0x10 ... 0x1f: /* implementation-dependent in the
2063 SPARCv8 manual, rdy on the
2064 microSPARC II */
2065 #endif
2066 gen_movl_TN_reg(rd, cpu_y);
2067 break;
2068 #ifdef TARGET_SPARC64
2069 case 0x2: /* V9 rdccr */
2070 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2071 gen_movl_TN_reg(rd, cpu_dst);
2072 break;
2073 case 0x3: /* V9 rdasi */
2074 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2075 gen_movl_TN_reg(rd, cpu_dst);
2076 break;
2077 case 0x4: /* V9 rdtick */
2079 TCGv r_tickptr;
2081 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2082 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2083 offsetof(CPUState, tick));
2084 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2085 r_tickptr);
2086 tcg_temp_free(r_tickptr);
2087 gen_movl_TN_reg(rd, cpu_dst);
2089 break;
2090 case 0x5: /* V9 rdpc */
2092 TCGv r_const;
2094 r_const = tcg_const_tl(dc->pc);
2095 gen_movl_TN_reg(rd, r_const);
2096 tcg_temp_free(r_const);
2098 break;
2099 case 0x6: /* V9 rdfprs */
2100 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2101 gen_movl_TN_reg(rd, cpu_dst);
2102 break;
2103 case 0xf: /* V9 membar */
2104 break; /* no effect */
2105 case 0x13: /* Graphics Status */
2106 if (gen_trap_ifnofpu(dc, cpu_cond))
2107 goto jmp_insn;
2108 gen_movl_TN_reg(rd, cpu_gsr);
2109 break;
2110 case 0x17: /* Tick compare */
2111 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2112 break;
2113 case 0x18: /* System tick */
2115 TCGv r_tickptr;
2117 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2118 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2119 offsetof(CPUState, stick));
2120 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2121 r_tickptr);
2122 tcg_temp_free(r_tickptr);
2123 gen_movl_TN_reg(rd, cpu_dst);
2125 break;
2126 case 0x19: /* System tick compare */
2127 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2128 break;
2129 case 0x10: /* Performance Control */
2130 case 0x11: /* Performance Instrumentation Counter */
2131 case 0x12: /* Dispatch Control */
2132 case 0x14: /* Softint set, WO */
2133 case 0x15: /* Softint clear, WO */
2134 case 0x16: /* Softint write */
2135 #endif
2136 default:
2137 goto illegal_insn;
2139 #if !defined(CONFIG_USER_ONLY)
2140 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2141 #ifndef TARGET_SPARC64
2142 if (!supervisor(dc))
2143 goto priv_insn;
2144 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2145 #else
2146 CHECK_IU_FEATURE(dc, HYPV);
2147 if (!hypervisor(dc))
2148 goto priv_insn;
2149 rs1 = GET_FIELD(insn, 13, 17);
2150 switch (rs1) {
2151 case 0: // hpstate
2152 // gen_op_rdhpstate();
2153 break;
2154 case 1: // htstate
2155 // gen_op_rdhtstate();
2156 break;
2157 case 3: // hintp
2158 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2159 break;
2160 case 5: // htba
2161 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2162 break;
2163 case 6: // hver
2164 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2165 break;
2166 case 31: // hstick_cmpr
2167 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2168 break;
2169 default:
2170 goto illegal_insn;
2172 #endif
2173 gen_movl_TN_reg(rd, cpu_dst);
2174 break;
2175 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2176 if (!supervisor(dc))
2177 goto priv_insn;
2178 #ifdef TARGET_SPARC64
2179 rs1 = GET_FIELD(insn, 13, 17);
2180 switch (rs1) {
2181 case 0: // tpc
2183 TCGv r_tsptr;
2185 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2186 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2187 offsetof(CPUState, tsptr));
2188 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2189 offsetof(trap_state, tpc));
2190 tcg_temp_free(r_tsptr);
2192 break;
2193 case 1: // tnpc
2195 TCGv r_tsptr;
2197 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2198 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2199 offsetof(CPUState, tsptr));
2200 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2201 offsetof(trap_state, tnpc));
2202 tcg_temp_free(r_tsptr);
2204 break;
2205 case 2: // tstate
2207 TCGv r_tsptr;
2209 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2210 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2211 offsetof(CPUState, tsptr));
2212 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2213 offsetof(trap_state, tstate));
2214 tcg_temp_free(r_tsptr);
2216 break;
2217 case 3: // tt
2219 TCGv r_tsptr;
2221 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2222 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2223 offsetof(CPUState, tsptr));
2224 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2225 offsetof(trap_state, tt));
2226 tcg_temp_free(r_tsptr);
2228 break;
2229 case 4: // tick
2231 TCGv r_tickptr;
2233 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2234 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2235 offsetof(CPUState, tick));
2236 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2237 r_tickptr);
2238 gen_movl_TN_reg(rd, cpu_tmp0);
2239 tcg_temp_free(r_tickptr);
2241 break;
2242 case 5: // tba
2243 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2244 break;
2245 case 6: // pstate
2246 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2247 offsetof(CPUSPARCState, pstate));
2248 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2249 break;
2250 case 7: // tl
2251 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2252 offsetof(CPUSPARCState, tl));
2253 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2254 break;
2255 case 8: // pil
2256 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2257 offsetof(CPUSPARCState, psrpil));
2258 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2259 break;
2260 case 9: // cwp
2261 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2262 break;
2263 case 10: // cansave
2264 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2265 offsetof(CPUSPARCState, cansave));
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2267 break;
2268 case 11: // canrestore
2269 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2270 offsetof(CPUSPARCState, canrestore));
2271 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2272 break;
2273 case 12: // cleanwin
2274 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2275 offsetof(CPUSPARCState, cleanwin));
2276 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2277 break;
2278 case 13: // otherwin
2279 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2280 offsetof(CPUSPARCState, otherwin));
2281 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2282 break;
2283 case 14: // wstate
2284 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2285 offsetof(CPUSPARCState, wstate));
2286 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2287 break;
2288 case 16: // UA2005 gl
2289 CHECK_IU_FEATURE(dc, GL);
2290 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2291 offsetof(CPUSPARCState, gl));
2292 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2293 break;
2294 case 26: // UA2005 strand status
2295 CHECK_IU_FEATURE(dc, HYPV);
2296 if (!hypervisor(dc))
2297 goto priv_insn;
2298 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2299 break;
2300 case 31: // ver
2301 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2302 break;
2303 case 15: // fq
2304 default:
2305 goto illegal_insn;
2307 #else
2308 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2309 #endif
2310 gen_movl_TN_reg(rd, cpu_tmp0);
2311 break;
2312 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2313 #ifdef TARGET_SPARC64
2314 save_state(dc, cpu_cond);
2315 tcg_gen_helper_0_0(helper_flushw);
2316 #else
2317 if (!supervisor(dc))
2318 goto priv_insn;
2319 gen_movl_TN_reg(rd, cpu_tbr);
2320 #endif
2321 break;
2322 #endif
2323 } else if (xop == 0x34) { /* FPU Operations */
2324 if (gen_trap_ifnofpu(dc, cpu_cond))
2325 goto jmp_insn;
2326 gen_op_clear_ieee_excp_and_FTT();
2327 rs1 = GET_FIELD(insn, 13, 17);
2328 rs2 = GET_FIELD(insn, 27, 31);
2329 xop = GET_FIELD(insn, 18, 26);
2330 switch (xop) {
2331 case 0x1: /* fmovs */
2332 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2333 break;
2334 case 0x5: /* fnegs */
2335 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2336 cpu_fpr[rs2]);
2337 break;
2338 case 0x9: /* fabss */
2339 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2340 cpu_fpr[rs2]);
2341 break;
2342 case 0x29: /* fsqrts */
2343 CHECK_FPU_FEATURE(dc, FSQRT);
2344 gen_clear_float_exceptions();
2345 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2346 cpu_fpr[rs2]);
2347 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2348 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2349 break;
2350 case 0x2a: /* fsqrtd */
2351 CHECK_FPU_FEATURE(dc, FSQRT);
2352 gen_op_load_fpr_DT1(DFPREG(rs2));
2353 gen_clear_float_exceptions();
2354 tcg_gen_helper_0_0(helper_fsqrtd);
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2356 gen_op_store_DT0_fpr(DFPREG(rd));
2357 break;
2358 case 0x2b: /* fsqrtq */
2359 CHECK_FPU_FEATURE(dc, FLOAT128);
2360 gen_op_load_fpr_QT1(QFPREG(rs2));
2361 gen_clear_float_exceptions();
2362 tcg_gen_helper_0_0(helper_fsqrtq);
2363 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2364 gen_op_store_QT0_fpr(QFPREG(rd));
2365 break;
2366 case 0x41: /* fadds */
2367 gen_clear_float_exceptions();
2368 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2369 cpu_fpr[rs1], cpu_fpr[rs2]);
2370 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2371 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2372 break;
2373 case 0x42:
2374 gen_op_load_fpr_DT0(DFPREG(rs1));
2375 gen_op_load_fpr_DT1(DFPREG(rs2));
2376 gen_clear_float_exceptions();
2377 tcg_gen_helper_0_0(helper_faddd);
2378 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2379 gen_op_store_DT0_fpr(DFPREG(rd));
2380 break;
2381 case 0x43: /* faddq */
2382 CHECK_FPU_FEATURE(dc, FLOAT128);
2383 gen_op_load_fpr_QT0(QFPREG(rs1));
2384 gen_op_load_fpr_QT1(QFPREG(rs2));
2385 gen_clear_float_exceptions();
2386 tcg_gen_helper_0_0(helper_faddq);
2387 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2388 gen_op_store_QT0_fpr(QFPREG(rd));
2389 break;
2390 case 0x45: /* fsubs */
2391 gen_clear_float_exceptions();
2392 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2393 cpu_fpr[rs1], cpu_fpr[rs2]);
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2395 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2396 break;
2397 case 0x46:
2398 gen_op_load_fpr_DT0(DFPREG(rs1));
2399 gen_op_load_fpr_DT1(DFPREG(rs2));
2400 gen_clear_float_exceptions();
2401 tcg_gen_helper_0_0(helper_fsubd);
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2403 gen_op_store_DT0_fpr(DFPREG(rd));
2404 break;
2405 case 0x47: /* fsubq */
2406 CHECK_FPU_FEATURE(dc, FLOAT128);
2407 gen_op_load_fpr_QT0(QFPREG(rs1));
2408 gen_op_load_fpr_QT1(QFPREG(rs2));
2409 gen_clear_float_exceptions();
2410 tcg_gen_helper_0_0(helper_fsubq);
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2412 gen_op_store_QT0_fpr(QFPREG(rd));
2413 break;
2414 case 0x49: /* fmuls */
2415 CHECK_FPU_FEATURE(dc, FMUL);
2416 gen_clear_float_exceptions();
2417 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2418 cpu_fpr[rs1], cpu_fpr[rs2]);
2419 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2420 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2421 break;
2422 case 0x4a: /* fmuld */
2423 CHECK_FPU_FEATURE(dc, FMUL);
2424 gen_op_load_fpr_DT0(DFPREG(rs1));
2425 gen_op_load_fpr_DT1(DFPREG(rs2));
2426 gen_clear_float_exceptions();
2427 tcg_gen_helper_0_0(helper_fmuld);
2428 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2429 gen_op_store_DT0_fpr(DFPREG(rd));
2430 break;
2431 case 0x4b: /* fmulq */
2432 CHECK_FPU_FEATURE(dc, FLOAT128);
2433 CHECK_FPU_FEATURE(dc, FMUL);
2434 gen_op_load_fpr_QT0(QFPREG(rs1));
2435 gen_op_load_fpr_QT1(QFPREG(rs2));
2436 gen_clear_float_exceptions();
2437 tcg_gen_helper_0_0(helper_fmulq);
2438 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2439 gen_op_store_QT0_fpr(QFPREG(rd));
2440 break;
2441 case 0x4d: /* fdivs */
2442 gen_clear_float_exceptions();
2443 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2444 cpu_fpr[rs1], cpu_fpr[rs2]);
2445 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2446 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2447 break;
2448 case 0x4e: /* fdivd */
2449 gen_op_load_fpr_DT0(DFPREG(rs1));
2450 gen_op_load_fpr_DT1(DFPREG(rs2));
2451 gen_clear_float_exceptions();
2452 tcg_gen_helper_0_0(helper_fdivd);
2453 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2454 gen_op_store_DT0_fpr(DFPREG(rd));
2455 break;
2456 case 0x4f: /* fdivq */
2457 CHECK_FPU_FEATURE(dc, FLOAT128);
2458 gen_op_load_fpr_QT0(QFPREG(rs1));
2459 gen_op_load_fpr_QT1(QFPREG(rs2));
2460 gen_clear_float_exceptions();
2461 tcg_gen_helper_0_0(helper_fdivq);
2462 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2463 gen_op_store_QT0_fpr(QFPREG(rd));
2464 break;
2465 case 0x69: /* fsmuld */
2466 CHECK_FPU_FEATURE(dc, FSMULD);
2467 gen_clear_float_exceptions();
2468 tcg_gen_helper_0_2(helper_fsmuld, cpu_fpr[rs1],
2469 cpu_fpr[rs2]);
2470 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2471 gen_op_store_DT0_fpr(DFPREG(rd));
2472 break;
2473 case 0x6e: /* fdmulq */
2474 CHECK_FPU_FEATURE(dc, FLOAT128);
2475 gen_op_load_fpr_DT0(DFPREG(rs1));
2476 gen_op_load_fpr_DT1(DFPREG(rs2));
2477 gen_clear_float_exceptions();
2478 tcg_gen_helper_0_0(helper_fdmulq);
2479 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2480 gen_op_store_QT0_fpr(QFPREG(rd));
2481 break;
2482 case 0xc4: /* fitos */
2483 gen_clear_float_exceptions();
2484 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2485 cpu_fpr[rs2]);
2486 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2487 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2488 break;
2489 case 0xc6: /* fdtos */
2490 gen_op_load_fpr_DT1(DFPREG(rs2));
2491 gen_clear_float_exceptions();
2492 tcg_gen_helper_1_0(helper_fdtos, cpu_tmp32);
2493 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2494 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2495 break;
2496 case 0xc7: /* fqtos */
2497 CHECK_FPU_FEATURE(dc, FLOAT128);
2498 gen_op_load_fpr_QT1(QFPREG(rs2));
2499 gen_clear_float_exceptions();
2500 tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
2501 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2502 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2503 break;
2504 case 0xc8: /* fitod */
2505 tcg_gen_helper_0_1(helper_fitod, cpu_fpr[rs2]);
2506 gen_op_store_DT0_fpr(DFPREG(rd));
2507 break;
2508 case 0xc9: /* fstod */
2509 tcg_gen_helper_0_1(helper_fstod, cpu_fpr[rs2]);
2510 gen_op_store_DT0_fpr(DFPREG(rd));
2511 break;
2512 case 0xcb: /* fqtod */
2513 CHECK_FPU_FEATURE(dc, FLOAT128);
2514 gen_op_load_fpr_QT1(QFPREG(rs2));
2515 gen_clear_float_exceptions();
2516 tcg_gen_helper_0_0(helper_fqtod);
2517 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2518 gen_op_store_DT0_fpr(DFPREG(rd));
2519 break;
2520 case 0xcc: /* fitoq */
2521 CHECK_FPU_FEATURE(dc, FLOAT128);
2522 tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
2523 gen_op_store_QT0_fpr(QFPREG(rd));
2524 break;
2525 case 0xcd: /* fstoq */
2526 CHECK_FPU_FEATURE(dc, FLOAT128);
2527 tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
2528 gen_op_store_QT0_fpr(QFPREG(rd));
2529 break;
2530 case 0xce: /* fdtoq */
2531 CHECK_FPU_FEATURE(dc, FLOAT128);
2532 gen_op_load_fpr_DT1(DFPREG(rs2));
2533 tcg_gen_helper_0_0(helper_fdtoq);
2534 gen_op_store_QT0_fpr(QFPREG(rd));
2535 break;
2536 case 0xd1: /* fstoi */
2537 gen_clear_float_exceptions();
2538 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2539 cpu_fpr[rs2]);
2540 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2541 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2542 break;
2543 case 0xd2: /* fdtoi */
2544 gen_op_load_fpr_DT1(DFPREG(rs2));
2545 gen_clear_float_exceptions();
2546 tcg_gen_helper_1_0(helper_fdtoi, cpu_tmp32);
2547 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2548 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2549 break;
2550 case 0xd3: /* fqtoi */
2551 CHECK_FPU_FEATURE(dc, FLOAT128);
2552 gen_op_load_fpr_QT1(QFPREG(rs2));
2553 gen_clear_float_exceptions();
2554 tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
2555 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2556 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2557 break;
2558 #ifdef TARGET_SPARC64
2559 case 0x2: /* V9 fmovd */
2560 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2561 cpu_fpr[DFPREG(rs2)]);
2562 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2563 cpu_fpr[DFPREG(rs2) + 1]);
2564 break;
2565 case 0x3: /* V9 fmovq */
2566 CHECK_FPU_FEATURE(dc, FLOAT128);
2567 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2568 cpu_fpr[QFPREG(rs2)]);
2569 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2570 cpu_fpr[QFPREG(rs2) + 1]);
2571 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2572 cpu_fpr[QFPREG(rs2) + 2]);
2573 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2574 cpu_fpr[QFPREG(rs2) + 3]);
2575 break;
2576 case 0x6: /* V9 fnegd */
2577 gen_op_load_fpr_DT1(DFPREG(rs2));
2578 tcg_gen_helper_0_0(helper_fnegd);
2579 gen_op_store_DT0_fpr(DFPREG(rd));
2580 break;
2581 case 0x7: /* V9 fnegq */
2582 CHECK_FPU_FEATURE(dc, FLOAT128);
2583 gen_op_load_fpr_QT1(QFPREG(rs2));
2584 tcg_gen_helper_0_0(helper_fnegq);
2585 gen_op_store_QT0_fpr(QFPREG(rd));
2586 break;
2587 case 0xa: /* V9 fabsd */
2588 gen_op_load_fpr_DT1(DFPREG(rs2));
2589 tcg_gen_helper_0_0(helper_fabsd);
2590 gen_op_store_DT0_fpr(DFPREG(rd));
2591 break;
2592 case 0xb: /* V9 fabsq */
2593 CHECK_FPU_FEATURE(dc, FLOAT128);
2594 gen_op_load_fpr_QT1(QFPREG(rs2));
2595 tcg_gen_helper_0_0(helper_fabsq);
2596 gen_op_store_QT0_fpr(QFPREG(rd));
2597 break;
2598 case 0x81: /* V9 fstox */
2599 gen_clear_float_exceptions();
2600 tcg_gen_helper_0_1(helper_fstox, cpu_fpr[rs2]);
2601 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2602 gen_op_store_DT0_fpr(DFPREG(rd));
2603 break;
2604 case 0x82: /* V9 fdtox */
2605 gen_op_load_fpr_DT1(DFPREG(rs2));
2606 gen_clear_float_exceptions();
2607 tcg_gen_helper_0_0(helper_fdtox);
2608 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2609 gen_op_store_DT0_fpr(DFPREG(rd));
2610 break;
2611 case 0x83: /* V9 fqtox */
2612 CHECK_FPU_FEATURE(dc, FLOAT128);
2613 gen_op_load_fpr_QT1(QFPREG(rs2));
2614 gen_clear_float_exceptions();
2615 tcg_gen_helper_0_0(helper_fqtox);
2616 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2617 gen_op_store_DT0_fpr(DFPREG(rd));
2618 break;
2619 case 0x84: /* V9 fxtos */
2620 gen_op_load_fpr_DT1(DFPREG(rs2));
2621 gen_clear_float_exceptions();
2622 tcg_gen_helper_1_0(helper_fxtos, cpu_tmp32);
2623 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2624 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2625 break;
2626 case 0x88: /* V9 fxtod */
2627 gen_op_load_fpr_DT1(DFPREG(rs2));
2628 gen_clear_float_exceptions();
2629 tcg_gen_helper_0_0(helper_fxtod);
2630 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2631 gen_op_store_DT0_fpr(DFPREG(rd));
2632 break;
2633 case 0x8c: /* V9 fxtoq */
2634 CHECK_FPU_FEATURE(dc, FLOAT128);
2635 gen_op_load_fpr_DT1(DFPREG(rs2));
2636 gen_clear_float_exceptions();
2637 tcg_gen_helper_0_0(helper_fxtoq);
2638 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2639 gen_op_store_QT0_fpr(QFPREG(rd));
2640 break;
2641 #endif
2642 default:
2643 goto illegal_insn;
2645 } else if (xop == 0x35) { /* FPU Operations */
2646 #ifdef TARGET_SPARC64
2647 int cond;
2648 #endif
2649 if (gen_trap_ifnofpu(dc, cpu_cond))
2650 goto jmp_insn;
2651 gen_op_clear_ieee_excp_and_FTT();
2652 rs1 = GET_FIELD(insn, 13, 17);
2653 rs2 = GET_FIELD(insn, 27, 31);
2654 xop = GET_FIELD(insn, 18, 26);
2655 #ifdef TARGET_SPARC64
2656 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2657 int l1;
2659 l1 = gen_new_label();
2660 cond = GET_FIELD_SP(insn, 14, 17);
2661 cpu_src1 = get_src1(insn, cpu_src1);
2662 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2663 0, l1);
2664 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2665 gen_set_label(l1);
2666 break;
2667 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2668 int l1;
2670 l1 = gen_new_label();
2671 cond = GET_FIELD_SP(insn, 14, 17);
2672 cpu_src1 = get_src1(insn, cpu_src1);
2673 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2674 0, l1);
2675 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2676 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2677 gen_set_label(l1);
2678 break;
2679 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2680 int l1;
2682 CHECK_FPU_FEATURE(dc, FLOAT128);
2683 l1 = gen_new_label();
2684 cond = GET_FIELD_SP(insn, 14, 17);
2685 cpu_src1 = get_src1(insn, cpu_src1);
2686 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2687 0, l1);
2688 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2689 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2690 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2691 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2692 gen_set_label(l1);
2693 break;
2695 #endif
2696 switch (xop) {
2697 #ifdef TARGET_SPARC64
2698 #define FMOVSCC(fcc) \
2700 TCGv r_cond; \
2701 int l1; \
2703 l1 = gen_new_label(); \
2704 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2705 cond = GET_FIELD_SP(insn, 14, 17); \
2706 gen_fcond(r_cond, fcc, cond); \
2707 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2708 0, l1); \
2709 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2710 gen_set_label(l1); \
2711 tcg_temp_free(r_cond); \
2713 #define FMOVDCC(fcc) \
2715 TCGv r_cond; \
2716 int l1; \
2718 l1 = gen_new_label(); \
2719 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2720 cond = GET_FIELD_SP(insn, 14, 17); \
2721 gen_fcond(r_cond, fcc, cond); \
2722 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2723 0, l1); \
2724 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2725 cpu_fpr[DFPREG(rs2)]); \
2726 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2727 cpu_fpr[DFPREG(rs2) + 1]); \
2728 gen_set_label(l1); \
2729 tcg_temp_free(r_cond); \
2731 #define FMOVQCC(fcc) \
2733 TCGv r_cond; \
2734 int l1; \
2736 l1 = gen_new_label(); \
2737 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2738 cond = GET_FIELD_SP(insn, 14, 17); \
2739 gen_fcond(r_cond, fcc, cond); \
2740 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2741 0, l1); \
2742 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2743 cpu_fpr[QFPREG(rs2)]); \
2744 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2745 cpu_fpr[QFPREG(rs2) + 1]); \
2746 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2747 cpu_fpr[QFPREG(rs2) + 2]); \
2748 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2749 cpu_fpr[QFPREG(rs2) + 3]); \
2750 gen_set_label(l1); \
2751 tcg_temp_free(r_cond); \
2753 case 0x001: /* V9 fmovscc %fcc0 */
2754 FMOVSCC(0);
2755 break;
2756 case 0x002: /* V9 fmovdcc %fcc0 */
2757 FMOVDCC(0);
2758 break;
2759 case 0x003: /* V9 fmovqcc %fcc0 */
2760 CHECK_FPU_FEATURE(dc, FLOAT128);
2761 FMOVQCC(0);
2762 break;
2763 case 0x041: /* V9 fmovscc %fcc1 */
2764 FMOVSCC(1);
2765 break;
2766 case 0x042: /* V9 fmovdcc %fcc1 */
2767 FMOVDCC(1);
2768 break;
2769 case 0x043: /* V9 fmovqcc %fcc1 */
2770 CHECK_FPU_FEATURE(dc, FLOAT128);
2771 FMOVQCC(1);
2772 break;
2773 case 0x081: /* V9 fmovscc %fcc2 */
2774 FMOVSCC(2);
2775 break;
2776 case 0x082: /* V9 fmovdcc %fcc2 */
2777 FMOVDCC(2);
2778 break;
2779 case 0x083: /* V9 fmovqcc %fcc2 */
2780 CHECK_FPU_FEATURE(dc, FLOAT128);
2781 FMOVQCC(2);
2782 break;
2783 case 0x0c1: /* V9 fmovscc %fcc3 */
2784 FMOVSCC(3);
2785 break;
2786 case 0x0c2: /* V9 fmovdcc %fcc3 */
2787 FMOVDCC(3);
2788 break;
2789 case 0x0c3: /* V9 fmovqcc %fcc3 */
2790 CHECK_FPU_FEATURE(dc, FLOAT128);
2791 FMOVQCC(3);
2792 break;
2793 #undef FMOVSCC
2794 #undef FMOVDCC
2795 #undef FMOVQCC
2796 #define FMOVCC(size_FDQ, icc) \
2798 TCGv r_cond; \
2799 int l1; \
2801 l1 = gen_new_label(); \
2802 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2803 cond = GET_FIELD_SP(insn, 14, 17); \
2804 gen_cond(r_cond, icc, cond); \
2805 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2806 0, l1); \
2807 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2808 (glue(size_FDQ, FPREG(rs2))); \
2809 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2810 (glue(size_FDQ, FPREG(rd))); \
2811 gen_set_label(l1); \
2812 tcg_temp_free(r_cond); \
2814 #define FMOVSCC(icc) \
2816 TCGv r_cond; \
2817 int l1; \
2819 l1 = gen_new_label(); \
2820 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2821 cond = GET_FIELD_SP(insn, 14, 17); \
2822 gen_cond(r_cond, icc, cond); \
2823 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2824 0, l1); \
2825 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2826 gen_set_label(l1); \
2827 tcg_temp_free(r_cond); \
2829 #define FMOVDCC(icc) \
2831 TCGv r_cond; \
2832 int l1; \
2834 l1 = gen_new_label(); \
2835 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2836 cond = GET_FIELD_SP(insn, 14, 17); \
2837 gen_cond(r_cond, icc, cond); \
2838 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2839 0, l1); \
2840 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2841 cpu_fpr[DFPREG(rs2)]); \
2842 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2843 cpu_fpr[DFPREG(rs2) + 1]); \
2844 gen_set_label(l1); \
2845 tcg_temp_free(r_cond); \
2847 #define FMOVQCC(icc) \
2849 TCGv r_cond; \
2850 int l1; \
2852 l1 = gen_new_label(); \
2853 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2854 cond = GET_FIELD_SP(insn, 14, 17); \
2855 gen_cond(r_cond, icc, cond); \
2856 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2857 0, l1); \
2858 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2859 cpu_fpr[QFPREG(rs2)]); \
2860 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2861 cpu_fpr[QFPREG(rs2) + 1]); \
2862 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2863 cpu_fpr[QFPREG(rs2) + 2]); \
2864 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2865 cpu_fpr[QFPREG(rs2) + 3]); \
2866 gen_set_label(l1); \
2867 tcg_temp_free(r_cond); \
2870 case 0x101: /* V9 fmovscc %icc */
2871 FMOVSCC(0);
2872 break;
2873 case 0x102: /* V9 fmovdcc %icc */
2874 FMOVDCC(0);
2875 case 0x103: /* V9 fmovqcc %icc */
2876 CHECK_FPU_FEATURE(dc, FLOAT128);
2877 FMOVQCC(0);
2878 break;
2879 case 0x181: /* V9 fmovscc %xcc */
2880 FMOVSCC(1);
2881 break;
2882 case 0x182: /* V9 fmovdcc %xcc */
2883 FMOVDCC(1);
2884 break;
2885 case 0x183: /* V9 fmovqcc %xcc */
2886 CHECK_FPU_FEATURE(dc, FLOAT128);
2887 FMOVQCC(1);
2888 break;
2889 #undef FMOVSCC
2890 #undef FMOVDCC
2891 #undef FMOVQCC
2892 #endif
2893 case 0x51: /* fcmps, V9 %fcc */
2894 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2895 break;
2896 case 0x52: /* fcmpd, V9 %fcc */
2897 gen_op_load_fpr_DT0(DFPREG(rs1));
2898 gen_op_load_fpr_DT1(DFPREG(rs2));
2899 gen_op_fcmpd(rd & 3);
2900 break;
2901 case 0x53: /* fcmpq, V9 %fcc */
2902 CHECK_FPU_FEATURE(dc, FLOAT128);
2903 gen_op_load_fpr_QT0(QFPREG(rs1));
2904 gen_op_load_fpr_QT1(QFPREG(rs2));
2905 gen_op_fcmpq(rd & 3);
2906 break;
2907 case 0x55: /* fcmpes, V9 %fcc */
2908 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2909 break;
2910 case 0x56: /* fcmped, V9 %fcc */
2911 gen_op_load_fpr_DT0(DFPREG(rs1));
2912 gen_op_load_fpr_DT1(DFPREG(rs2));
2913 gen_op_fcmped(rd & 3);
2914 break;
2915 case 0x57: /* fcmpeq, V9 %fcc */
2916 CHECK_FPU_FEATURE(dc, FLOAT128);
2917 gen_op_load_fpr_QT0(QFPREG(rs1));
2918 gen_op_load_fpr_QT1(QFPREG(rs2));
2919 gen_op_fcmpeq(rd & 3);
2920 break;
2921 default:
2922 goto illegal_insn;
2924 } else if (xop == 0x2) {
2925 // clr/mov shortcut
2927 rs1 = GET_FIELD(insn, 13, 17);
2928 if (rs1 == 0) {
2929 // or %g0, x, y -> mov T0, x; mov y, T0
2930 if (IS_IMM) { /* immediate */
2931 TCGv r_const;
2933 rs2 = GET_FIELDs(insn, 19, 31);
2934 r_const = tcg_const_tl((int)rs2);
2935 gen_movl_TN_reg(rd, r_const);
2936 tcg_temp_free(r_const);
2937 } else { /* register */
2938 rs2 = GET_FIELD(insn, 27, 31);
2939 gen_movl_reg_TN(rs2, cpu_dst);
2940 gen_movl_TN_reg(rd, cpu_dst);
2942 } else {
2943 cpu_src1 = get_src1(insn, cpu_src1);
2944 if (IS_IMM) { /* immediate */
2945 rs2 = GET_FIELDs(insn, 19, 31);
2946 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2947 gen_movl_TN_reg(rd, cpu_dst);
2948 } else { /* register */
2949 // or x, %g0, y -> mov T1, x; mov y, T1
2950 rs2 = GET_FIELD(insn, 27, 31);
2951 if (rs2 != 0) {
2952 gen_movl_reg_TN(rs2, cpu_src2);
2953 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2954 gen_movl_TN_reg(rd, cpu_dst);
2955 } else
2956 gen_movl_TN_reg(rd, cpu_src1);
2959 #ifdef TARGET_SPARC64
2960 } else if (xop == 0x25) { /* sll, V9 sllx */
2961 cpu_src1 = get_src1(insn, cpu_src1);
2962 if (IS_IMM) { /* immediate */
2963 rs2 = GET_FIELDs(insn, 20, 31);
2964 if (insn & (1 << 12)) {
2965 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2966 } else {
2967 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2969 } else { /* register */
2970 rs2 = GET_FIELD(insn, 27, 31);
2971 gen_movl_reg_TN(rs2, cpu_src2);
2972 if (insn & (1 << 12)) {
2973 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2974 } else {
2975 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2977 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2979 gen_movl_TN_reg(rd, cpu_dst);
2980 } else if (xop == 0x26) { /* srl, V9 srlx */
2981 cpu_src1 = get_src1(insn, cpu_src1);
2982 if (IS_IMM) { /* immediate */
2983 rs2 = GET_FIELDs(insn, 20, 31);
2984 if (insn & (1 << 12)) {
2985 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2986 } else {
2987 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2988 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2990 } else { /* register */
2991 rs2 = GET_FIELD(insn, 27, 31);
2992 gen_movl_reg_TN(rs2, cpu_src2);
2993 if (insn & (1 << 12)) {
2994 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2995 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2996 } else {
2997 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2998 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2999 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3002 gen_movl_TN_reg(rd, cpu_dst);
3003 } else if (xop == 0x27) { /* sra, V9 srax */
3004 cpu_src1 = get_src1(insn, cpu_src1);
3005 if (IS_IMM) { /* immediate */
3006 rs2 = GET_FIELDs(insn, 20, 31);
3007 if (insn & (1 << 12)) {
3008 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3009 } else {
3010 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3011 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3012 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3014 } else { /* register */
3015 rs2 = GET_FIELD(insn, 27, 31);
3016 gen_movl_reg_TN(rs2, cpu_src2);
3017 if (insn & (1 << 12)) {
3018 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3019 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3020 } else {
3021 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3022 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3023 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3024 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3027 gen_movl_TN_reg(rd, cpu_dst);
3028 #endif
3029 } else if (xop < 0x36) {
3030 cpu_src1 = get_src1(insn, cpu_src1);
3031 cpu_src2 = get_src2(insn, cpu_src2);
3032 if (xop < 0x20) {
3033 switch (xop & ~0x10) {
3034 case 0x0:
3035 if (xop & 0x10)
3036 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3037 else
3038 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3039 break;
3040 case 0x1:
3041 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3042 if (xop & 0x10)
3043 gen_op_logic_cc(cpu_dst);
3044 break;
3045 case 0x2:
3046 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3047 if (xop & 0x10)
3048 gen_op_logic_cc(cpu_dst);
3049 break;
3050 case 0x3:
3051 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3052 if (xop & 0x10)
3053 gen_op_logic_cc(cpu_dst);
3054 break;
3055 case 0x4:
3056 if (xop & 0x10)
3057 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3058 else
3059 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3060 break;
3061 case 0x5:
3062 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3063 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3064 if (xop & 0x10)
3065 gen_op_logic_cc(cpu_dst);
3066 break;
3067 case 0x6:
3068 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3069 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3070 if (xop & 0x10)
3071 gen_op_logic_cc(cpu_dst);
3072 break;
3073 case 0x7:
3074 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3075 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3076 if (xop & 0x10)
3077 gen_op_logic_cc(cpu_dst);
3078 break;
3079 case 0x8:
3080 if (xop & 0x10)
3081 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3082 else {
3083 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3084 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3085 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3087 break;
3088 #ifdef TARGET_SPARC64
3089 case 0x9: /* V9 mulx */
3090 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3091 break;
3092 #endif
3093 case 0xa:
3094 CHECK_IU_FEATURE(dc, MUL);
3095 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3096 if (xop & 0x10)
3097 gen_op_logic_cc(cpu_dst);
3098 break;
3099 case 0xb:
3100 CHECK_IU_FEATURE(dc, MUL);
3101 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3102 if (xop & 0x10)
3103 gen_op_logic_cc(cpu_dst);
3104 break;
3105 case 0xc:
3106 if (xop & 0x10)
3107 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3108 else {
3109 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3110 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3111 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3113 break;
3114 #ifdef TARGET_SPARC64
3115 case 0xd: /* V9 udivx */
3116 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3117 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3118 gen_trap_ifdivzero_tl(cpu_cc_src2);
3119 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3120 break;
3121 #endif
3122 case 0xe:
3123 CHECK_IU_FEATURE(dc, DIV);
3124 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3125 cpu_src2);
3126 if (xop & 0x10)
3127 gen_op_div_cc(cpu_dst);
3128 break;
3129 case 0xf:
3130 CHECK_IU_FEATURE(dc, DIV);
3131 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3132 cpu_src2);
3133 if (xop & 0x10)
3134 gen_op_div_cc(cpu_dst);
3135 break;
3136 default:
3137 goto illegal_insn;
3139 gen_movl_TN_reg(rd, cpu_dst);
3140 } else {
3141 switch (xop) {
3142 case 0x20: /* taddcc */
3143 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3144 gen_movl_TN_reg(rd, cpu_dst);
3145 break;
3146 case 0x21: /* tsubcc */
3147 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3148 gen_movl_TN_reg(rd, cpu_dst);
3149 break;
3150 case 0x22: /* taddcctv */
3151 save_state(dc, cpu_cond);
3152 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3153 gen_movl_TN_reg(rd, cpu_dst);
3154 break;
3155 case 0x23: /* tsubcctv */
3156 save_state(dc, cpu_cond);
3157 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3158 gen_movl_TN_reg(rd, cpu_dst);
3159 break;
3160 case 0x24: /* mulscc */
3161 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3162 gen_movl_TN_reg(rd, cpu_dst);
3163 break;
3164 #ifndef TARGET_SPARC64
3165 case 0x25: /* sll */
3166 if (IS_IMM) { /* immediate */
3167 rs2 = GET_FIELDs(insn, 20, 31);
3168 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3169 } else { /* register */
3170 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3171 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3173 gen_movl_TN_reg(rd, cpu_dst);
3174 break;
3175 case 0x26: /* srl */
3176 if (IS_IMM) { /* immediate */
3177 rs2 = GET_FIELDs(insn, 20, 31);
3178 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3179 } else { /* register */
3180 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3181 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3183 gen_movl_TN_reg(rd, cpu_dst);
3184 break;
3185 case 0x27: /* sra */
3186 if (IS_IMM) { /* immediate */
3187 rs2 = GET_FIELDs(insn, 20, 31);
3188 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3189 } else { /* register */
3190 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3191 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3193 gen_movl_TN_reg(rd, cpu_dst);
3194 break;
3195 #endif
3196 case 0x30:
3198 switch(rd) {
3199 case 0: /* wry */
3200 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3201 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3202 break;
3203 #ifndef TARGET_SPARC64
3204 case 0x01 ... 0x0f: /* undefined in the
3205 SPARCv8 manual, nop
3206 on the microSPARC
3207 II */
3208 case 0x10 ... 0x1f: /* implementation-dependent
3209 in the SPARCv8
3210 manual, nop on the
3211 microSPARC II */
3212 break;
3213 #else
3214 case 0x2: /* V9 wrccr */
3215 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3216 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3217 break;
3218 case 0x3: /* V9 wrasi */
3219 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3220 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3221 break;
3222 case 0x6: /* V9 wrfprs */
3223 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3224 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3225 save_state(dc, cpu_cond);
3226 gen_op_next_insn();
3227 tcg_gen_exit_tb(0);
3228 dc->is_br = 1;
3229 break;
3230 case 0xf: /* V9 sir, nop if user */
3231 #if !defined(CONFIG_USER_ONLY)
3232 if (supervisor(dc))
3233 ; // XXX
3234 #endif
3235 break;
3236 case 0x13: /* Graphics Status */
3237 if (gen_trap_ifnofpu(dc, cpu_cond))
3238 goto jmp_insn;
3239 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3240 break;
3241 case 0x17: /* Tick compare */
3242 #if !defined(CONFIG_USER_ONLY)
3243 if (!supervisor(dc))
3244 goto illegal_insn;
3245 #endif
3247 TCGv r_tickptr;
3249 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3250 cpu_src2);
3251 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3252 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3253 offsetof(CPUState, tick));
3254 tcg_gen_helper_0_2(helper_tick_set_limit,
3255 r_tickptr, cpu_tick_cmpr);
3256 tcg_temp_free(r_tickptr);
3258 break;
3259 case 0x18: /* System tick */
3260 #if !defined(CONFIG_USER_ONLY)
3261 if (!supervisor(dc))
3262 goto illegal_insn;
3263 #endif
3265 TCGv r_tickptr;
3267 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3268 cpu_src2);
3269 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3270 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3271 offsetof(CPUState, stick));
3272 tcg_gen_helper_0_2(helper_tick_set_count,
3273 r_tickptr, cpu_dst);
3274 tcg_temp_free(r_tickptr);
3276 break;
3277 case 0x19: /* System tick compare */
3278 #if !defined(CONFIG_USER_ONLY)
3279 if (!supervisor(dc))
3280 goto illegal_insn;
3281 #endif
3283 TCGv r_tickptr;
3285 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3286 cpu_src2);
3287 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3288 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3289 offsetof(CPUState, stick));
3290 tcg_gen_helper_0_2(helper_tick_set_limit,
3291 r_tickptr, cpu_stick_cmpr);
3292 tcg_temp_free(r_tickptr);
3294 break;
3296 case 0x10: /* Performance Control */
3297 case 0x11: /* Performance Instrumentation
3298 Counter */
3299 case 0x12: /* Dispatch Control */
3300 case 0x14: /* Softint set */
3301 case 0x15: /* Softint clear */
3302 case 0x16: /* Softint write */
3303 #endif
3304 default:
3305 goto illegal_insn;
3308 break;
3309 #if !defined(CONFIG_USER_ONLY)
3310 case 0x31: /* wrpsr, V9 saved, restored */
3312 if (!supervisor(dc))
3313 goto priv_insn;
3314 #ifdef TARGET_SPARC64
3315 switch (rd) {
3316 case 0:
3317 tcg_gen_helper_0_0(helper_saved);
3318 break;
3319 case 1:
3320 tcg_gen_helper_0_0(helper_restored);
3321 break;
3322 case 2: /* UA2005 allclean */
3323 case 3: /* UA2005 otherw */
3324 case 4: /* UA2005 normalw */
3325 case 5: /* UA2005 invalw */
3326 // XXX
3327 default:
3328 goto illegal_insn;
3330 #else
3331 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3332 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3333 save_state(dc, cpu_cond);
3334 gen_op_next_insn();
3335 tcg_gen_exit_tb(0);
3336 dc->is_br = 1;
3337 #endif
3339 break;
3340 case 0x32: /* wrwim, V9 wrpr */
3342 if (!supervisor(dc))
3343 goto priv_insn;
3344 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3345 #ifdef TARGET_SPARC64
3346 switch (rd) {
3347 case 0: // tpc
3349 TCGv r_tsptr;
3351 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3352 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3353 offsetof(CPUState, tsptr));
3354 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3355 offsetof(trap_state, tpc));
3356 tcg_temp_free(r_tsptr);
3358 break;
3359 case 1: // tnpc
3361 TCGv r_tsptr;
3363 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3364 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3365 offsetof(CPUState, tsptr));
3366 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3367 offsetof(trap_state, tnpc));
3368 tcg_temp_free(r_tsptr);
3370 break;
3371 case 2: // tstate
3373 TCGv r_tsptr;
3375 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3376 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3377 offsetof(CPUState, tsptr));
3378 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3379 offsetof(trap_state,
3380 tstate));
3381 tcg_temp_free(r_tsptr);
3383 break;
3384 case 3: // tt
3386 TCGv r_tsptr;
3388 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3389 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3390 offsetof(CPUState, tsptr));
3391 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3392 offsetof(trap_state, tt));
3393 tcg_temp_free(r_tsptr);
3395 break;
3396 case 4: // tick
3398 TCGv r_tickptr;
3400 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3401 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3402 offsetof(CPUState, tick));
3403 tcg_gen_helper_0_2(helper_tick_set_count,
3404 r_tickptr, cpu_tmp0);
3405 tcg_temp_free(r_tickptr);
3407 break;
3408 case 5: // tba
3409 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3410 break;
3411 case 6: // pstate
3412 save_state(dc, cpu_cond);
3413 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3414 gen_op_next_insn();
3415 tcg_gen_exit_tb(0);
3416 dc->is_br = 1;
3417 break;
3418 case 7: // tl
3419 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3420 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3421 offsetof(CPUSPARCState, tl));
3422 break;
3423 case 8: // pil
3424 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3425 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3426 offsetof(CPUSPARCState,
3427 psrpil));
3428 break;
3429 case 9: // cwp
3430 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3431 break;
3432 case 10: // cansave
3433 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3434 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3435 offsetof(CPUSPARCState,
3436 cansave));
3437 break;
3438 case 11: // canrestore
3439 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3440 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3441 offsetof(CPUSPARCState,
3442 canrestore));
3443 break;
3444 case 12: // cleanwin
3445 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3446 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3447 offsetof(CPUSPARCState,
3448 cleanwin));
3449 break;
3450 case 13: // otherwin
3451 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3452 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3453 offsetof(CPUSPARCState,
3454 otherwin));
3455 break;
3456 case 14: // wstate
3457 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3458 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3459 offsetof(CPUSPARCState,
3460 wstate));
3461 break;
3462 case 16: // UA2005 gl
3463 CHECK_IU_FEATURE(dc, GL);
3464 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3465 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3466 offsetof(CPUSPARCState, gl));
3467 break;
3468 case 26: // UA2005 strand status
3469 CHECK_IU_FEATURE(dc, HYPV);
3470 if (!hypervisor(dc))
3471 goto priv_insn;
3472 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3473 break;
3474 default:
3475 goto illegal_insn;
3477 #else
3478 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3479 if (dc->def->nwindows != 32)
3480 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3481 (1 << dc->def->nwindows) - 1);
3482 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3483 #endif
3485 break;
3486 case 0x33: /* wrtbr, UA2005 wrhpr */
3488 #ifndef TARGET_SPARC64
3489 if (!supervisor(dc))
3490 goto priv_insn;
3491 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3492 #else
3493 CHECK_IU_FEATURE(dc, HYPV);
3494 if (!hypervisor(dc))
3495 goto priv_insn;
3496 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3497 switch (rd) {
3498 case 0: // hpstate
3499 // XXX gen_op_wrhpstate();
3500 save_state(dc, cpu_cond);
3501 gen_op_next_insn();
3502 tcg_gen_exit_tb(0);
3503 dc->is_br = 1;
3504 break;
3505 case 1: // htstate
3506 // XXX gen_op_wrhtstate();
3507 break;
3508 case 3: // hintp
3509 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3510 break;
3511 case 5: // htba
3512 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3513 break;
3514 case 31: // hstick_cmpr
3516 TCGv r_tickptr;
3518 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3519 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3520 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3521 offsetof(CPUState, hstick));
3522 tcg_gen_helper_0_2(helper_tick_set_limit,
3523 r_tickptr, cpu_hstick_cmpr);
3524 tcg_temp_free(r_tickptr);
3526 break;
3527 case 6: // hver readonly
3528 default:
3529 goto illegal_insn;
3531 #endif
3533 break;
3534 #endif
3535 #ifdef TARGET_SPARC64
3536 case 0x2c: /* V9 movcc */
3538 int cc = GET_FIELD_SP(insn, 11, 12);
3539 int cond = GET_FIELD_SP(insn, 14, 17);
3540 TCGv r_cond;
3541 int l1;
3543 r_cond = tcg_temp_new(TCG_TYPE_TL);
3544 if (insn & (1 << 18)) {
3545 if (cc == 0)
3546 gen_cond(r_cond, 0, cond);
3547 else if (cc == 2)
3548 gen_cond(r_cond, 1, cond);
3549 else
3550 goto illegal_insn;
3551 } else {
3552 gen_fcond(r_cond, cc, cond);
3555 l1 = gen_new_label();
3557 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3558 if (IS_IMM) { /* immediate */
3559 TCGv r_const;
3561 rs2 = GET_FIELD_SPs(insn, 0, 10);
3562 r_const = tcg_const_tl((int)rs2);
3563 gen_movl_TN_reg(rd, r_const);
3564 tcg_temp_free(r_const);
3565 } else {
3566 rs2 = GET_FIELD_SP(insn, 0, 4);
3567 gen_movl_reg_TN(rs2, cpu_tmp0);
3568 gen_movl_TN_reg(rd, cpu_tmp0);
3570 gen_set_label(l1);
3571 tcg_temp_free(r_cond);
3572 break;
3574 case 0x2d: /* V9 sdivx */
3575 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3576 gen_movl_TN_reg(rd, cpu_dst);
3577 break;
3578 case 0x2e: /* V9 popc */
3580 cpu_src2 = get_src2(insn, cpu_src2);
3581 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3582 cpu_src2);
3583 gen_movl_TN_reg(rd, cpu_dst);
3585 case 0x2f: /* V9 movr */
3587 int cond = GET_FIELD_SP(insn, 10, 12);
3588 int l1;
3590 cpu_src1 = get_src1(insn, cpu_src1);
3592 l1 = gen_new_label();
3594 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3595 cpu_src1, 0, l1);
3596 if (IS_IMM) { /* immediate */
3597 TCGv r_const;
3599 rs2 = GET_FIELD_SPs(insn, 0, 9);
3600 r_const = tcg_const_tl((int)rs2);
3601 gen_movl_TN_reg(rd, r_const);
3602 tcg_temp_free(r_const);
3603 } else {
3604 rs2 = GET_FIELD_SP(insn, 0, 4);
3605 gen_movl_reg_TN(rs2, cpu_tmp0);
3606 gen_movl_TN_reg(rd, cpu_tmp0);
3608 gen_set_label(l1);
3609 break;
3611 #endif
3612 default:
3613 goto illegal_insn;
3616 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3617 #ifdef TARGET_SPARC64
3618 int opf = GET_FIELD_SP(insn, 5, 13);
3619 rs1 = GET_FIELD(insn, 13, 17);
3620 rs2 = GET_FIELD(insn, 27, 31);
3621 if (gen_trap_ifnofpu(dc, cpu_cond))
3622 goto jmp_insn;
3624 switch (opf) {
3625 case 0x000: /* VIS I edge8cc */
3626 case 0x001: /* VIS II edge8n */
3627 case 0x002: /* VIS I edge8lcc */
3628 case 0x003: /* VIS II edge8ln */
3629 case 0x004: /* VIS I edge16cc */
3630 case 0x005: /* VIS II edge16n */
3631 case 0x006: /* VIS I edge16lcc */
3632 case 0x007: /* VIS II edge16ln */
3633 case 0x008: /* VIS I edge32cc */
3634 case 0x009: /* VIS II edge32n */
3635 case 0x00a: /* VIS I edge32lcc */
3636 case 0x00b: /* VIS II edge32ln */
3637 // XXX
3638 goto illegal_insn;
3639 case 0x010: /* VIS I array8 */
3640 CHECK_FPU_FEATURE(dc, VIS1);
3641 cpu_src1 = get_src1(insn, cpu_src1);
3642 gen_movl_reg_TN(rs2, cpu_src2);
3643 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3644 cpu_src2);
3645 gen_movl_TN_reg(rd, cpu_dst);
3646 break;
3647 case 0x012: /* VIS I array16 */
3648 CHECK_FPU_FEATURE(dc, VIS1);
3649 cpu_src1 = get_src1(insn, cpu_src1);
3650 gen_movl_reg_TN(rs2, cpu_src2);
3651 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3652 cpu_src2);
3653 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3654 gen_movl_TN_reg(rd, cpu_dst);
3655 break;
3656 case 0x014: /* VIS I array32 */
3657 CHECK_FPU_FEATURE(dc, VIS1);
3658 cpu_src1 = get_src1(insn, cpu_src1);
3659 gen_movl_reg_TN(rs2, cpu_src2);
3660 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3661 cpu_src2);
3662 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3663 gen_movl_TN_reg(rd, cpu_dst);
3664 break;
3665 case 0x018: /* VIS I alignaddr */
3666 CHECK_FPU_FEATURE(dc, VIS1);
3667 cpu_src1 = get_src1(insn, cpu_src1);
3668 gen_movl_reg_TN(rs2, cpu_src2);
3669 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3670 cpu_src2);
3671 gen_movl_TN_reg(rd, cpu_dst);
3672 break;
3673 case 0x019: /* VIS II bmask */
3674 case 0x01a: /* VIS I alignaddrl */
3675 // XXX
3676 goto illegal_insn;
3677 case 0x020: /* VIS I fcmple16 */
3678 CHECK_FPU_FEATURE(dc, VIS1);
3679 gen_op_load_fpr_DT0(DFPREG(rs1));
3680 gen_op_load_fpr_DT1(DFPREG(rs2));
3681 tcg_gen_helper_0_0(helper_fcmple16);
3682 gen_op_store_DT0_fpr(DFPREG(rd));
3683 break;
3684 case 0x022: /* VIS I fcmpne16 */
3685 CHECK_FPU_FEATURE(dc, VIS1);
3686 gen_op_load_fpr_DT0(DFPREG(rs1));
3687 gen_op_load_fpr_DT1(DFPREG(rs2));
3688 tcg_gen_helper_0_0(helper_fcmpne16);
3689 gen_op_store_DT0_fpr(DFPREG(rd));
3690 break;
3691 case 0x024: /* VIS I fcmple32 */
3692 CHECK_FPU_FEATURE(dc, VIS1);
3693 gen_op_load_fpr_DT0(DFPREG(rs1));
3694 gen_op_load_fpr_DT1(DFPREG(rs2));
3695 tcg_gen_helper_0_0(helper_fcmple32);
3696 gen_op_store_DT0_fpr(DFPREG(rd));
3697 break;
3698 case 0x026: /* VIS I fcmpne32 */
3699 CHECK_FPU_FEATURE(dc, VIS1);
3700 gen_op_load_fpr_DT0(DFPREG(rs1));
3701 gen_op_load_fpr_DT1(DFPREG(rs2));
3702 tcg_gen_helper_0_0(helper_fcmpne32);
3703 gen_op_store_DT0_fpr(DFPREG(rd));
3704 break;
3705 case 0x028: /* VIS I fcmpgt16 */
3706 CHECK_FPU_FEATURE(dc, VIS1);
3707 gen_op_load_fpr_DT0(DFPREG(rs1));
3708 gen_op_load_fpr_DT1(DFPREG(rs2));
3709 tcg_gen_helper_0_0(helper_fcmpgt16);
3710 gen_op_store_DT0_fpr(DFPREG(rd));
3711 break;
3712 case 0x02a: /* VIS I fcmpeq16 */
3713 CHECK_FPU_FEATURE(dc, VIS1);
3714 gen_op_load_fpr_DT0(DFPREG(rs1));
3715 gen_op_load_fpr_DT1(DFPREG(rs2));
3716 tcg_gen_helper_0_0(helper_fcmpeq16);
3717 gen_op_store_DT0_fpr(DFPREG(rd));
3718 break;
3719 case 0x02c: /* VIS I fcmpgt32 */
3720 CHECK_FPU_FEATURE(dc, VIS1);
3721 gen_op_load_fpr_DT0(DFPREG(rs1));
3722 gen_op_load_fpr_DT1(DFPREG(rs2));
3723 tcg_gen_helper_0_0(helper_fcmpgt32);
3724 gen_op_store_DT0_fpr(DFPREG(rd));
3725 break;
3726 case 0x02e: /* VIS I fcmpeq32 */
3727 CHECK_FPU_FEATURE(dc, VIS1);
3728 gen_op_load_fpr_DT0(DFPREG(rs1));
3729 gen_op_load_fpr_DT1(DFPREG(rs2));
3730 tcg_gen_helper_0_0(helper_fcmpeq32);
3731 gen_op_store_DT0_fpr(DFPREG(rd));
3732 break;
3733 case 0x031: /* VIS I fmul8x16 */
3734 CHECK_FPU_FEATURE(dc, VIS1);
3735 gen_op_load_fpr_DT0(DFPREG(rs1));
3736 gen_op_load_fpr_DT1(DFPREG(rs2));
3737 tcg_gen_helper_0_0(helper_fmul8x16);
3738 gen_op_store_DT0_fpr(DFPREG(rd));
3739 break;
3740 case 0x033: /* VIS I fmul8x16au */
3741 CHECK_FPU_FEATURE(dc, VIS1);
3742 gen_op_load_fpr_DT0(DFPREG(rs1));
3743 gen_op_load_fpr_DT1(DFPREG(rs2));
3744 tcg_gen_helper_0_0(helper_fmul8x16au);
3745 gen_op_store_DT0_fpr(DFPREG(rd));
3746 break;
3747 case 0x035: /* VIS I fmul8x16al */
3748 CHECK_FPU_FEATURE(dc, VIS1);
3749 gen_op_load_fpr_DT0(DFPREG(rs1));
3750 gen_op_load_fpr_DT1(DFPREG(rs2));
3751 tcg_gen_helper_0_0(helper_fmul8x16al);
3752 gen_op_store_DT0_fpr(DFPREG(rd));
3753 break;
3754 case 0x036: /* VIS I fmul8sux16 */
3755 CHECK_FPU_FEATURE(dc, VIS1);
3756 gen_op_load_fpr_DT0(DFPREG(rs1));
3757 gen_op_load_fpr_DT1(DFPREG(rs2));
3758 tcg_gen_helper_0_0(helper_fmul8sux16);
3759 gen_op_store_DT0_fpr(DFPREG(rd));
3760 break;
3761 case 0x037: /* VIS I fmul8ulx16 */
3762 CHECK_FPU_FEATURE(dc, VIS1);
3763 gen_op_load_fpr_DT0(DFPREG(rs1));
3764 gen_op_load_fpr_DT1(DFPREG(rs2));
3765 tcg_gen_helper_0_0(helper_fmul8ulx16);
3766 gen_op_store_DT0_fpr(DFPREG(rd));
3767 break;
3768 case 0x038: /* VIS I fmuld8sux16 */
3769 CHECK_FPU_FEATURE(dc, VIS1);
3770 gen_op_load_fpr_DT0(DFPREG(rs1));
3771 gen_op_load_fpr_DT1(DFPREG(rs2));
3772 tcg_gen_helper_0_0(helper_fmuld8sux16);
3773 gen_op_store_DT0_fpr(DFPREG(rd));
3774 break;
3775 case 0x039: /* VIS I fmuld8ulx16 */
3776 CHECK_FPU_FEATURE(dc, VIS1);
3777 gen_op_load_fpr_DT0(DFPREG(rs1));
3778 gen_op_load_fpr_DT1(DFPREG(rs2));
3779 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3780 gen_op_store_DT0_fpr(DFPREG(rd));
3781 break;
3782 case 0x03a: /* VIS I fpack32 */
3783 case 0x03b: /* VIS I fpack16 */
3784 case 0x03d: /* VIS I fpackfix */
3785 case 0x03e: /* VIS I pdist */
3786 // XXX
3787 goto illegal_insn;
3788 case 0x048: /* VIS I faligndata */
3789 CHECK_FPU_FEATURE(dc, VIS1);
3790 gen_op_load_fpr_DT0(DFPREG(rs1));
3791 gen_op_load_fpr_DT1(DFPREG(rs2));
3792 tcg_gen_helper_0_0(helper_faligndata);
3793 gen_op_store_DT0_fpr(DFPREG(rd));
3794 break;
3795 case 0x04b: /* VIS I fpmerge */
3796 CHECK_FPU_FEATURE(dc, VIS1);
3797 gen_op_load_fpr_DT0(DFPREG(rs1));
3798 gen_op_load_fpr_DT1(DFPREG(rs2));
3799 tcg_gen_helper_0_0(helper_fpmerge);
3800 gen_op_store_DT0_fpr(DFPREG(rd));
3801 break;
3802 case 0x04c: /* VIS II bshuffle */
3803 // XXX
3804 goto illegal_insn;
3805 case 0x04d: /* VIS I fexpand */
3806 CHECK_FPU_FEATURE(dc, VIS1);
3807 gen_op_load_fpr_DT0(DFPREG(rs1));
3808 gen_op_load_fpr_DT1(DFPREG(rs2));
3809 tcg_gen_helper_0_0(helper_fexpand);
3810 gen_op_store_DT0_fpr(DFPREG(rd));
3811 break;
3812 case 0x050: /* VIS I fpadd16 */
3813 CHECK_FPU_FEATURE(dc, VIS1);
3814 gen_op_load_fpr_DT0(DFPREG(rs1));
3815 gen_op_load_fpr_DT1(DFPREG(rs2));
3816 tcg_gen_helper_0_0(helper_fpadd16);
3817 gen_op_store_DT0_fpr(DFPREG(rd));
3818 break;
3819 case 0x051: /* VIS I fpadd16s */
3820 CHECK_FPU_FEATURE(dc, VIS1);
3821 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3822 cpu_fpr[rs1], cpu_fpr[rs2]);
3823 break;
3824 case 0x052: /* VIS I fpadd32 */
3825 CHECK_FPU_FEATURE(dc, VIS1);
3826 gen_op_load_fpr_DT0(DFPREG(rs1));
3827 gen_op_load_fpr_DT1(DFPREG(rs2));
3828 tcg_gen_helper_0_0(helper_fpadd32);
3829 gen_op_store_DT0_fpr(DFPREG(rd));
3830 break;
3831 case 0x053: /* VIS I fpadd32s */
3832 CHECK_FPU_FEATURE(dc, VIS1);
3833 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3834 cpu_fpr[rs1], cpu_fpr[rs2]);
3835 break;
3836 case 0x054: /* VIS I fpsub16 */
3837 CHECK_FPU_FEATURE(dc, VIS1);
3838 gen_op_load_fpr_DT0(DFPREG(rs1));
3839 gen_op_load_fpr_DT1(DFPREG(rs2));
3840 tcg_gen_helper_0_0(helper_fpsub16);
3841 gen_op_store_DT0_fpr(DFPREG(rd));
3842 break;
3843 case 0x055: /* VIS I fpsub16s */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3846 cpu_fpr[rs1], cpu_fpr[rs2]);
3847 break;
3848 case 0x056: /* VIS I fpsub32 */
3849 CHECK_FPU_FEATURE(dc, VIS1);
3850 gen_op_load_fpr_DT0(DFPREG(rs1));
3851 gen_op_load_fpr_DT1(DFPREG(rs2));
3852 tcg_gen_helper_0_0(helper_fpsub32);
3853 gen_op_store_DT0_fpr(DFPREG(rd));
3854 break;
3855 case 0x057: /* VIS I fpsub32s */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3858 cpu_fpr[rs1], cpu_fpr[rs2]);
3859 break;
3860 case 0x060: /* VIS I fzero */
3861 CHECK_FPU_FEATURE(dc, VIS1);
3862 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3863 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3864 break;
3865 case 0x061: /* VIS I fzeros */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3868 break;
3869 case 0x062: /* VIS I fnor */
3870 CHECK_FPU_FEATURE(dc, VIS1);
3871 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3872 cpu_fpr[DFPREG(rs2)]);
3873 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3874 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3875 cpu_fpr[DFPREG(rs2) + 1]);
3876 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3877 break;
3878 case 0x063: /* VIS I fnors */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3881 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3882 break;
3883 case 0x064: /* VIS I fandnot2 */
3884 CHECK_FPU_FEATURE(dc, VIS1);
3885 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3886 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3887 cpu_fpr[DFPREG(rs2)]);
3888 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3889 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3890 cpu_fpr[DFPREG(rs2) + 1]);
3891 break;
3892 case 0x065: /* VIS I fandnot2s */
3893 CHECK_FPU_FEATURE(dc, VIS1);
3894 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3895 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3896 break;
3897 case 0x066: /* VIS I fnot2 */
3898 CHECK_FPU_FEATURE(dc, VIS1);
3899 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3900 -1);
3901 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3902 cpu_fpr[DFPREG(rs2) + 1], -1);
3903 break;
3904 case 0x067: /* VIS I fnot2s */
3905 CHECK_FPU_FEATURE(dc, VIS1);
3906 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3907 break;
3908 case 0x068: /* VIS I fandnot1 */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3911 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3912 cpu_fpr[DFPREG(rs1)]);
3913 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3914 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3915 cpu_fpr[DFPREG(rs1) + 1]);
3916 break;
3917 case 0x069: /* VIS I fandnot1s */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3920 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3921 break;
3922 case 0x06a: /* VIS I fnot1 */
3923 CHECK_FPU_FEATURE(dc, VIS1);
3924 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3925 -1);
3926 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3927 cpu_fpr[DFPREG(rs1) + 1], -1);
3928 break;
3929 case 0x06b: /* VIS I fnot1s */
3930 CHECK_FPU_FEATURE(dc, VIS1);
3931 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3932 break;
3933 case 0x06c: /* VIS I fxor */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3936 cpu_fpr[DFPREG(rs2)]);
3937 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3938 cpu_fpr[DFPREG(rs1) + 1],
3939 cpu_fpr[DFPREG(rs2) + 1]);
3940 break;
3941 case 0x06d: /* VIS I fxors */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3944 break;
3945 case 0x06e: /* VIS I fnand */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3948 cpu_fpr[DFPREG(rs2)]);
3949 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3950 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3951 cpu_fpr[DFPREG(rs2) + 1]);
3952 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3953 break;
3954 case 0x06f: /* VIS I fnands */
3955 CHECK_FPU_FEATURE(dc, VIS1);
3956 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3957 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3958 break;
3959 case 0x070: /* VIS I fand */
3960 CHECK_FPU_FEATURE(dc, VIS1);
3961 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3962 cpu_fpr[DFPREG(rs2)]);
3963 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3964 cpu_fpr[DFPREG(rs1) + 1],
3965 cpu_fpr[DFPREG(rs2) + 1]);
3966 break;
3967 case 0x071: /* VIS I fands */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3970 break;
3971 case 0x072: /* VIS I fxnor */
3972 CHECK_FPU_FEATURE(dc, VIS1);
3973 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3974 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3975 cpu_fpr[DFPREG(rs1)]);
3976 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3977 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3978 cpu_fpr[DFPREG(rs1) + 1]);
3979 break;
3980 case 0x073: /* VIS I fxnors */
3981 CHECK_FPU_FEATURE(dc, VIS1);
3982 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3983 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3984 break;
3985 case 0x074: /* VIS I fsrc1 */
3986 CHECK_FPU_FEATURE(dc, VIS1);
3987 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3988 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3989 cpu_fpr[DFPREG(rs1) + 1]);
3990 break;
3991 case 0x075: /* VIS I fsrc1s */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3994 break;
3995 case 0x076: /* VIS I fornot2 */
3996 CHECK_FPU_FEATURE(dc, VIS1);
3997 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3998 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3999 cpu_fpr[DFPREG(rs2)]);
4000 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
4001 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4002 cpu_fpr[DFPREG(rs2) + 1]);
4003 break;
4004 case 0x077: /* VIS I fornot2s */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4007 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4008 break;
4009 case 0x078: /* VIS I fsrc2 */
4010 CHECK_FPU_FEATURE(dc, VIS1);
4011 gen_op_load_fpr_DT0(DFPREG(rs2));
4012 gen_op_store_DT0_fpr(DFPREG(rd));
4013 break;
4014 case 0x079: /* VIS I fsrc2s */
4015 CHECK_FPU_FEATURE(dc, VIS1);
4016 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4017 break;
4018 case 0x07a: /* VIS I fornot1 */
4019 CHECK_FPU_FEATURE(dc, VIS1);
4020 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4021 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4022 cpu_fpr[DFPREG(rs1)]);
4023 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4024 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4025 cpu_fpr[DFPREG(rs1) + 1]);
4026 break;
4027 case 0x07b: /* VIS I fornot1s */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4030 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4031 break;
4032 case 0x07c: /* VIS I for */
4033 CHECK_FPU_FEATURE(dc, VIS1);
4034 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4035 cpu_fpr[DFPREG(rs2)]);
4036 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4037 cpu_fpr[DFPREG(rs1) + 1],
4038 cpu_fpr[DFPREG(rs2) + 1]);
4039 break;
4040 case 0x07d: /* VIS I fors */
4041 CHECK_FPU_FEATURE(dc, VIS1);
4042 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4043 break;
4044 case 0x07e: /* VIS I fone */
4045 CHECK_FPU_FEATURE(dc, VIS1);
4046 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4047 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4048 break;
4049 case 0x07f: /* VIS I fones */
4050 CHECK_FPU_FEATURE(dc, VIS1);
4051 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4052 break;
4053 case 0x080: /* VIS I shutdown */
4054 case 0x081: /* VIS II siam */
4055 // XXX
4056 goto illegal_insn;
4057 default:
4058 goto illegal_insn;
4060 #else
4061 goto ncp_insn;
4062 #endif
4063 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4064 #ifdef TARGET_SPARC64
4065 goto illegal_insn;
4066 #else
4067 goto ncp_insn;
4068 #endif
4069 #ifdef TARGET_SPARC64
4070 } else if (xop == 0x39) { /* V9 return */
4071 TCGv r_const;
4073 save_state(dc, cpu_cond);
4074 cpu_src1 = get_src1(insn, cpu_src1);
4075 if (IS_IMM) { /* immediate */
4076 rs2 = GET_FIELDs(insn, 19, 31);
4077 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4078 } else { /* register */
4079 rs2 = GET_FIELD(insn, 27, 31);
4080 if (rs2) {
4081 gen_movl_reg_TN(rs2, cpu_src2);
4082 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4083 } else
4084 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4086 tcg_gen_helper_0_0(helper_restore);
4087 gen_mov_pc_npc(dc, cpu_cond);
4088 r_const = tcg_const_i32(3);
4089 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4090 tcg_temp_free(r_const);
4091 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4092 dc->npc = DYNAMIC_PC;
4093 goto jmp_insn;
4094 #endif
4095 } else {
4096 cpu_src1 = get_src1(insn, cpu_src1);
4097 if (IS_IMM) { /* immediate */
4098 rs2 = GET_FIELDs(insn, 19, 31);
4099 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4100 } else { /* register */
4101 rs2 = GET_FIELD(insn, 27, 31);
4102 if (rs2) {
4103 gen_movl_reg_TN(rs2, cpu_src2);
4104 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4105 } else
4106 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4108 switch (xop) {
4109 case 0x38: /* jmpl */
4111 TCGv r_const;
4113 r_const = tcg_const_tl(dc->pc);
4114 gen_movl_TN_reg(rd, r_const);
4115 tcg_temp_free(r_const);
4116 gen_mov_pc_npc(dc, cpu_cond);
4117 r_const = tcg_const_i32(3);
4118 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4119 r_const);
4120 tcg_temp_free(r_const);
4121 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4122 dc->npc = DYNAMIC_PC;
4124 goto jmp_insn;
4125 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4126 case 0x39: /* rett, V9 return */
4128 TCGv r_const;
4130 if (!supervisor(dc))
4131 goto priv_insn;
4132 gen_mov_pc_npc(dc, cpu_cond);
4133 r_const = tcg_const_i32(3);
4134 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4135 r_const);
4136 tcg_temp_free(r_const);
4137 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4138 dc->npc = DYNAMIC_PC;
4139 tcg_gen_helper_0_0(helper_rett);
4141 goto jmp_insn;
4142 #endif
4143 case 0x3b: /* flush */
4144 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4145 goto unimp_flush;
4146 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4147 break;
4148 case 0x3c: /* save */
4149 save_state(dc, cpu_cond);
4150 tcg_gen_helper_0_0(helper_save);
4151 gen_movl_TN_reg(rd, cpu_dst);
4152 break;
4153 case 0x3d: /* restore */
4154 save_state(dc, cpu_cond);
4155 tcg_gen_helper_0_0(helper_restore);
4156 gen_movl_TN_reg(rd, cpu_dst);
4157 break;
4158 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4159 case 0x3e: /* V9 done/retry */
4161 switch (rd) {
4162 case 0:
4163 if (!supervisor(dc))
4164 goto priv_insn;
4165 dc->npc = DYNAMIC_PC;
4166 dc->pc = DYNAMIC_PC;
4167 tcg_gen_helper_0_0(helper_done);
4168 goto jmp_insn;
4169 case 1:
4170 if (!supervisor(dc))
4171 goto priv_insn;
4172 dc->npc = DYNAMIC_PC;
4173 dc->pc = DYNAMIC_PC;
4174 tcg_gen_helper_0_0(helper_retry);
4175 goto jmp_insn;
4176 default:
4177 goto illegal_insn;
4180 break;
4181 #endif
4182 default:
4183 goto illegal_insn;
4186 break;
4188 break;
4189 case 3: /* load/store instructions */
4191 unsigned int xop = GET_FIELD(insn, 7, 12);
4193 cpu_src1 = get_src1(insn, cpu_src1);
4194 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4195 rs2 = GET_FIELD(insn, 27, 31);
4196 gen_movl_reg_TN(rs2, cpu_src2);
4197 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4198 } else if (IS_IMM) { /* immediate */
4199 rs2 = GET_FIELDs(insn, 19, 31);
4200 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4201 } else { /* register */
4202 rs2 = GET_FIELD(insn, 27, 31);
4203 if (rs2 != 0) {
4204 gen_movl_reg_TN(rs2, cpu_src2);
4205 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4206 } else
4207 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4209 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4210 (xop > 0x17 && xop <= 0x1d ) ||
4211 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4212 switch (xop) {
4213 case 0x0: /* load unsigned word */
4214 gen_address_mask(dc, cpu_addr);
4215 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4216 break;
4217 case 0x1: /* load unsigned byte */
4218 gen_address_mask(dc, cpu_addr);
4219 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4220 break;
4221 case 0x2: /* load unsigned halfword */
4222 gen_address_mask(dc, cpu_addr);
4223 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4224 break;
4225 case 0x3: /* load double word */
4226 if (rd & 1)
4227 goto illegal_insn;
4228 else {
4229 TCGv r_const;
4231 save_state(dc, cpu_cond);
4232 r_const = tcg_const_i32(7);
4233 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4234 r_const); // XXX remove
4235 tcg_temp_free(r_const);
4236 gen_address_mask(dc, cpu_addr);
4237 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4238 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4239 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4240 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4241 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4242 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4243 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4245 break;
4246 case 0x9: /* load signed byte */
4247 gen_address_mask(dc, cpu_addr);
4248 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4249 break;
4250 case 0xa: /* load signed halfword */
4251 gen_address_mask(dc, cpu_addr);
4252 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4253 break;
4254 case 0xd: /* ldstub -- XXX: should be atomically */
4256 TCGv r_const;
4258 gen_address_mask(dc, cpu_addr);
4259 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4260 r_const = tcg_const_tl(0xff);
4261 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4262 tcg_temp_free(r_const);
4264 break;
4265 case 0x0f: /* swap register with memory. Also
4266 atomically */
4267 CHECK_IU_FEATURE(dc, SWAP);
4268 gen_movl_reg_TN(rd, cpu_val);
4269 gen_address_mask(dc, cpu_addr);
4270 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4271 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4272 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4273 break;
4274 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4275 case 0x10: /* load word alternate */
4276 #ifndef TARGET_SPARC64
4277 if (IS_IMM)
4278 goto illegal_insn;
4279 if (!supervisor(dc))
4280 goto priv_insn;
4281 #endif
4282 save_state(dc, cpu_cond);
4283 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4284 break;
4285 case 0x11: /* load unsigned byte alternate */
4286 #ifndef TARGET_SPARC64
4287 if (IS_IMM)
4288 goto illegal_insn;
4289 if (!supervisor(dc))
4290 goto priv_insn;
4291 #endif
4292 save_state(dc, cpu_cond);
4293 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4294 break;
4295 case 0x12: /* load unsigned halfword alternate */
4296 #ifndef TARGET_SPARC64
4297 if (IS_IMM)
4298 goto illegal_insn;
4299 if (!supervisor(dc))
4300 goto priv_insn;
4301 #endif
4302 save_state(dc, cpu_cond);
4303 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4304 break;
4305 case 0x13: /* load double word alternate */
4306 #ifndef TARGET_SPARC64
4307 if (IS_IMM)
4308 goto illegal_insn;
4309 if (!supervisor(dc))
4310 goto priv_insn;
4311 #endif
4312 if (rd & 1)
4313 goto illegal_insn;
4314 save_state(dc, cpu_cond);
4315 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4316 goto skip_move;
4317 case 0x19: /* load signed byte alternate */
4318 #ifndef TARGET_SPARC64
4319 if (IS_IMM)
4320 goto illegal_insn;
4321 if (!supervisor(dc))
4322 goto priv_insn;
4323 #endif
4324 save_state(dc, cpu_cond);
4325 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4326 break;
4327 case 0x1a: /* load signed halfword alternate */
4328 #ifndef TARGET_SPARC64
4329 if (IS_IMM)
4330 goto illegal_insn;
4331 if (!supervisor(dc))
4332 goto priv_insn;
4333 #endif
4334 save_state(dc, cpu_cond);
4335 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4336 break;
4337 case 0x1d: /* ldstuba -- XXX: should be atomically */
4338 #ifndef TARGET_SPARC64
4339 if (IS_IMM)
4340 goto illegal_insn;
4341 if (!supervisor(dc))
4342 goto priv_insn;
4343 #endif
4344 save_state(dc, cpu_cond);
4345 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4346 break;
4347 case 0x1f: /* swap reg with alt. memory. Also
4348 atomically */
4349 CHECK_IU_FEATURE(dc, SWAP);
4350 #ifndef TARGET_SPARC64
4351 if (IS_IMM)
4352 goto illegal_insn;
4353 if (!supervisor(dc))
4354 goto priv_insn;
4355 #endif
4356 save_state(dc, cpu_cond);
4357 gen_movl_reg_TN(rd, cpu_val);
4358 gen_swap_asi(cpu_val, cpu_addr, insn);
4359 break;
4361 #ifndef TARGET_SPARC64
4362 case 0x30: /* ldc */
4363 case 0x31: /* ldcsr */
4364 case 0x33: /* lddc */
4365 goto ncp_insn;
4366 #endif
4367 #endif
4368 #ifdef TARGET_SPARC64
4369 case 0x08: /* V9 ldsw */
4370 gen_address_mask(dc, cpu_addr);
4371 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4372 break;
4373 case 0x0b: /* V9 ldx */
4374 gen_address_mask(dc, cpu_addr);
4375 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4376 break;
4377 case 0x18: /* V9 ldswa */
4378 save_state(dc, cpu_cond);
4379 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4380 break;
4381 case 0x1b: /* V9 ldxa */
4382 save_state(dc, cpu_cond);
4383 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4384 break;
4385 case 0x2d: /* V9 prefetch, no effect */
4386 goto skip_move;
4387 case 0x30: /* V9 ldfa */
4388 save_state(dc, cpu_cond);
4389 gen_ldf_asi(cpu_addr, insn, 4, rd);
4390 goto skip_move;
4391 case 0x33: /* V9 lddfa */
4392 save_state(dc, cpu_cond);
4393 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4394 goto skip_move;
4395 case 0x3d: /* V9 prefetcha, no effect */
4396 goto skip_move;
4397 case 0x32: /* V9 ldqfa */
4398 CHECK_FPU_FEATURE(dc, FLOAT128);
4399 save_state(dc, cpu_cond);
4400 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4401 goto skip_move;
4402 #endif
4403 default:
4404 goto illegal_insn;
4406 gen_movl_TN_reg(rd, cpu_val);
4407 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4408 skip_move: ;
4409 #endif
4410 } else if (xop >= 0x20 && xop < 0x24) {
4411 if (gen_trap_ifnofpu(dc, cpu_cond))
4412 goto jmp_insn;
4413 save_state(dc, cpu_cond);
4414 switch (xop) {
4415 case 0x20: /* load fpreg */
4416 gen_address_mask(dc, cpu_addr);
4417 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4418 break;
4419 case 0x21: /* ldfsr, V9 ldxfsr */
4420 #ifdef TARGET_SPARC64
4421 gen_address_mask(dc, cpu_addr);
4422 if (rd == 1) {
4423 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4424 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4425 } else
4426 #else
4428 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4429 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4431 #endif
4432 break;
4433 case 0x22: /* load quad fpreg */
4435 TCGv r_const;
4437 CHECK_FPU_FEATURE(dc, FLOAT128);
4438 r_const = tcg_const_i32(dc->mem_idx);
4439 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4440 tcg_temp_free(r_const);
4441 gen_op_store_QT0_fpr(QFPREG(rd));
4443 break;
4444 case 0x23: /* load double fpreg */
4446 TCGv r_const;
4448 r_const = tcg_const_i32(dc->mem_idx);
4449 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4450 tcg_temp_free(r_const);
4451 gen_op_store_DT0_fpr(DFPREG(rd));
4453 break;
4454 default:
4455 goto illegal_insn;
4457 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4458 xop == 0xe || xop == 0x1e) {
4459 gen_movl_reg_TN(rd, cpu_val);
4460 switch (xop) {
4461 case 0x4: /* store word */
4462 gen_address_mask(dc, cpu_addr);
4463 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4464 break;
4465 case 0x5: /* store byte */
4466 gen_address_mask(dc, cpu_addr);
4467 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4468 break;
4469 case 0x6: /* store halfword */
4470 gen_address_mask(dc, cpu_addr);
4471 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4472 break;
4473 case 0x7: /* store double word */
4474 if (rd & 1)
4475 goto illegal_insn;
4476 else {
4477 TCGv r_low, r_const;
4479 save_state(dc, cpu_cond);
4480 gen_address_mask(dc, cpu_addr);
4481 r_const = tcg_const_i32(7);
4482 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4483 r_const); // XXX remove
4484 tcg_temp_free(r_const);
4485 r_low = tcg_temp_new(TCG_TYPE_TL);
4486 gen_movl_reg_TN(rd + 1, r_low);
4487 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4488 r_low);
4489 tcg_temp_free(r_low);
4490 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4492 break;
4493 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4494 case 0x14: /* store word alternate */
4495 #ifndef TARGET_SPARC64
4496 if (IS_IMM)
4497 goto illegal_insn;
4498 if (!supervisor(dc))
4499 goto priv_insn;
4500 #endif
4501 save_state(dc, cpu_cond);
4502 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4503 break;
4504 case 0x15: /* store byte alternate */
4505 #ifndef TARGET_SPARC64
4506 if (IS_IMM)
4507 goto illegal_insn;
4508 if (!supervisor(dc))
4509 goto priv_insn;
4510 #endif
4511 save_state(dc, cpu_cond);
4512 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4513 break;
4514 case 0x16: /* store halfword alternate */
4515 #ifndef TARGET_SPARC64
4516 if (IS_IMM)
4517 goto illegal_insn;
4518 if (!supervisor(dc))
4519 goto priv_insn;
4520 #endif
4521 save_state(dc, cpu_cond);
4522 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4523 break;
4524 case 0x17: /* store double word alternate */
4525 #ifndef TARGET_SPARC64
4526 if (IS_IMM)
4527 goto illegal_insn;
4528 if (!supervisor(dc))
4529 goto priv_insn;
4530 #endif
4531 if (rd & 1)
4532 goto illegal_insn;
4533 else {
4534 save_state(dc, cpu_cond);
4535 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4537 break;
4538 #endif
4539 #ifdef TARGET_SPARC64
4540 case 0x0e: /* V9 stx */
4541 gen_address_mask(dc, cpu_addr);
4542 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4543 break;
4544 case 0x1e: /* V9 stxa */
4545 save_state(dc, cpu_cond);
4546 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4547 break;
4548 #endif
4549 default:
4550 goto illegal_insn;
4552 } else if (xop > 0x23 && xop < 0x28) {
4553 if (gen_trap_ifnofpu(dc, cpu_cond))
4554 goto jmp_insn;
4555 save_state(dc, cpu_cond);
4556 switch (xop) {
4557 case 0x24: /* store fpreg */
4558 gen_address_mask(dc, cpu_addr);
4559 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4560 break;
4561 case 0x25: /* stfsr, V9 stxfsr */
4562 #ifdef TARGET_SPARC64
4563 gen_address_mask(dc, cpu_addr);
4564 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4565 if (rd == 1)
4566 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4567 else {
4568 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4569 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4571 #else
4572 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4573 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4574 #endif
4575 break;
4576 case 0x26:
4577 #ifdef TARGET_SPARC64
4578 /* V9 stqf, store quad fpreg */
4580 TCGv r_const;
4582 CHECK_FPU_FEATURE(dc, FLOAT128);
4583 gen_op_load_fpr_QT0(QFPREG(rd));
4584 r_const = tcg_const_i32(dc->mem_idx);
4585 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4586 tcg_temp_free(r_const);
4588 break;
4589 #else /* !TARGET_SPARC64 */
4590 /* stdfq, store floating point queue */
4591 #if defined(CONFIG_USER_ONLY)
4592 goto illegal_insn;
4593 #else
4594 if (!supervisor(dc))
4595 goto priv_insn;
4596 if (gen_trap_ifnofpu(dc, cpu_cond))
4597 goto jmp_insn;
4598 goto nfq_insn;
4599 #endif
4600 #endif
4601 case 0x27: /* store double fpreg */
4603 TCGv r_const;
4605 gen_op_load_fpr_DT0(DFPREG(rd));
4606 r_const = tcg_const_i32(dc->mem_idx);
4607 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4608 tcg_temp_free(r_const);
4610 break;
4611 default:
4612 goto illegal_insn;
4614 } else if (xop > 0x33 && xop < 0x3f) {
4615 save_state(dc, cpu_cond);
4616 switch (xop) {
4617 #ifdef TARGET_SPARC64
4618 case 0x34: /* V9 stfa */
4619 gen_stf_asi(cpu_addr, insn, 4, rd);
4620 break;
4621 case 0x36: /* V9 stqfa */
4623 TCGv r_const;
4625 CHECK_FPU_FEATURE(dc, FLOAT128);
4626 r_const = tcg_const_i32(7);
4627 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4628 r_const);
4629 tcg_temp_free(r_const);
4630 gen_op_load_fpr_QT0(QFPREG(rd));
4631 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4633 break;
4634 case 0x37: /* V9 stdfa */
4635 gen_op_load_fpr_DT0(DFPREG(rd));
4636 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4637 break;
4638 case 0x3c: /* V9 casa */
4639 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4640 gen_movl_TN_reg(rd, cpu_val);
4641 break;
4642 case 0x3e: /* V9 casxa */
4643 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4644 gen_movl_TN_reg(rd, cpu_val);
4645 break;
4646 #else
4647 case 0x34: /* stc */
4648 case 0x35: /* stcsr */
4649 case 0x36: /* stdcq */
4650 case 0x37: /* stdc */
4651 goto ncp_insn;
4652 #endif
4653 default:
4654 goto illegal_insn;
4657 else
4658 goto illegal_insn;
4660 break;
4662 /* default case for non jump instructions */
4663 if (dc->npc == DYNAMIC_PC) {
4664 dc->pc = DYNAMIC_PC;
4665 gen_op_next_insn();
4666 } else if (dc->npc == JUMP_PC) {
4667 /* we can do a static jump */
4668 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4669 dc->is_br = 1;
4670 } else {
4671 dc->pc = dc->npc;
4672 dc->npc = dc->npc + 4;
4674 jmp_insn:
4675 return;
4676 illegal_insn:
4678 TCGv r_const;
4680 save_state(dc, cpu_cond);
4681 r_const = tcg_const_i32(TT_ILL_INSN);
4682 tcg_gen_helper_0_1(raise_exception, r_const);
4683 tcg_temp_free(r_const);
4684 dc->is_br = 1;
4686 return;
4687 unimp_flush:
4689 TCGv r_const;
4691 save_state(dc, cpu_cond);
4692 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4693 tcg_gen_helper_0_1(raise_exception, r_const);
4694 tcg_temp_free(r_const);
4695 dc->is_br = 1;
4697 return;
4698 #if !defined(CONFIG_USER_ONLY)
4699 priv_insn:
4701 TCGv r_const;
4703 save_state(dc, cpu_cond);
4704 r_const = tcg_const_i32(TT_PRIV_INSN);
4705 tcg_gen_helper_0_1(raise_exception, r_const);
4706 tcg_temp_free(r_const);
4707 dc->is_br = 1;
4709 return;
4710 #endif
4711 nfpu_insn:
4712 save_state(dc, cpu_cond);
4713 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4714 dc->is_br = 1;
4715 return;
4716 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4717 nfq_insn:
4718 save_state(dc, cpu_cond);
4719 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4720 dc->is_br = 1;
4721 return;
4722 #endif
4723 #ifndef TARGET_SPARC64
4724 ncp_insn:
4726 TCGv r_const;
4728 save_state(dc, cpu_cond);
4729 r_const = tcg_const_i32(TT_NCP_INSN);
4730 tcg_gen_helper_0_1(raise_exception, r_const);
4731 tcg_temp_free(r_const);
4732 dc->is_br = 1;
4734 return;
4735 #endif
/* Translate a block of guest SPARC code starting at tb->pc into TCG ops.
   spc != 0 selects "search PC" mode: for every translated instruction the
   pc/npc and icount are recorded in the gen_opc_* side tables so a host PC
   can later be mapped back to a guest PC (see gen_pc_load below).
   NOTE(review): dc->npc is seeded from tb->cs_base — presumably the caller
   stores the guest NPC there; confirm against the TB-creation path. */
4738 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4739                                                   int spc, CPUSPARCState *env)
4741     target_ulong pc_start, last_pc;
4742     uint16_t *gen_opc_end;
4743     DisasContext dc1, *dc = &dc1;
4744     int j, lj = -1;
4745     int num_insns;
4746     int max_insns;
     /* Set up the per-TB disassembly context from the CPU state. */
4748     memset(dc, 0, sizeof(DisasContext));
4749     dc->tb = tb;
4750     pc_start = tb->pc;
4751     dc->pc = pc_start;
4752     last_pc = dc->pc;
4753     dc->npc = (target_ulong) tb->cs_base;
4754     dc->mem_idx = cpu_mmu_index(env);
4755     dc->def = env->def;
4756     if ((dc->def->features & CPU_FEATURE_FLOAT))
4757         dc->fpu_enabled = cpu_fpu_enabled(env);
4758     else
4759         dc->fpu_enabled = 0;
4760 #ifdef TARGET_SPARC64
     /* V9 only: honor the PSTATE address-mask bit (32-bit addressing). */
4761     dc->address_mask_32bit = env->pstate & PS_AM;
4762 #endif
4763     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
     /* Allocate the scratch temporaries shared by disas_sparc_insn; the
        local temps survive across TCG basic-block boundaries. */
4765     cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4766     cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
4767     cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
4769     cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
4771     // loads and stores
4772     cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
4773     cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
4775     num_insns = 0;
4776     max_insns = tb->cflags & CF_COUNT_MASK;
4777     if (max_insns == 0)
4778         max_insns = CF_COUNT_MASK;
4779     gen_icount_start();
     /* Main translation loop: one guest instruction per iteration. */
4780     do {
         /* Stop at a debugger breakpoint: flush state and raise a debug
            exception instead of translating further. */
4781         if (env->nb_breakpoints > 0) {
4782             for(j = 0; j < env->nb_breakpoints; j++) {
4783                 if (env->breakpoints[j] == dc->pc) {
4784                     if (dc->pc != pc_start)
4785                         save_state(dc, cpu_cond);
4786                     tcg_gen_helper_0_0(helper_debug);
4787                     tcg_gen_exit_tb(0);
4788                     dc->is_br = 1;
4789                     goto exit_gen_loop;
         /* Search-PC mode: record the guest pc/npc for each generated op
            index so exceptions can restore precise guest state. */
4793         if (spc) {
4794             if (loglevel > 0)
4795                 fprintf(logfile, "Search PC...\n");
4796             j = gen_opc_ptr - gen_opc_buf;
4797             if (lj < j) {
4798                 lj++;
4799                 while (lj < j)
4800                     gen_opc_instr_start[lj++] = 0;
4801                 gen_opc_pc[lj] = dc->pc;
4802                 gen_opc_npc[lj] = dc->npc;
4803                 gen_opc_instr_start[lj] = 1;
4804                 gen_opc_icount[lj] = num_insns;
         /* When icount is active, the last insn of an I/O-ending TB must be
            bracketed by gen_io_start/gen_io_end. */
4807         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4808             gen_io_start();
4809         last_pc = dc->pc;
4810         disas_sparc_insn(dc);
4811         num_insns++;
         /* The insn ended the block (branch/trap). */
4813         if (dc->is_br)
4814             break;
4815         /* if the next PC is different, we abort now */
4816         if (dc->pc != (last_pc + 4))
4817             break;
4818         /* if we reach a page boundary, we stop generation so that the
4819            PC of a TT_TFAULT exception is always in the right page */
4820         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4821             break;
4822         /* if single step mode, we generate only one instruction and
4823            generate an exception */
4824         if (env->singlestep_enabled) {
4825             tcg_gen_movi_tl(cpu_pc, dc->pc);
4826             tcg_gen_exit_tb(0);
4827             break;
     /* Also stop when the op buffer is nearly full, near the end of the
        page, or when the icount budget is exhausted. */
4829     } while ((gen_opc_ptr < gen_opc_end) &&
4830              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4831              num_insns < max_insns);
4833  exit_gen_loop:
     /* Release the scratch temporaries allocated above. */
4834     tcg_temp_free(cpu_addr);
4835     tcg_temp_free(cpu_val);
4836     tcg_temp_free(cpu_dst);
4837     tcg_temp_free(cpu_tmp64);
4838     tcg_temp_free(cpu_tmp32);
4839     tcg_temp_free(cpu_tmp0);
4840     if (tb->cflags & CF_LAST_IO)
4841         gen_io_end();
     /* If the block fell through (no branch generated its own exit), emit
        the TB epilogue: direct-chain when pc/npc are static, otherwise
        store them and exit to the main loop. */
4842     if (!dc->is_br) {
4843         if (dc->pc != DYNAMIC_PC &&
4844             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4845             /* static PC and NPC: we can use direct chaining */
4846             gen_goto_tb(dc, 0, dc->pc, dc->npc);
4847         } else {
4848             if (dc->pc != DYNAMIC_PC)
4849                 tcg_gen_movi_tl(cpu_pc, dc->pc);
4850             save_npc(dc, cpu_cond);
4851             tcg_gen_exit_tb(0);
4854     gen_icount_end(tb, num_insns);
4855     *gen_opc_ptr = INDEX_op_end;
     /* Finish the side tables (search-PC mode) or record the TB size. */
4856     if (spc) {
4857         j = gen_opc_ptr - gen_opc_buf;
4858         lj++;
4859         while (lj <= j)
4860             gen_opc_instr_start[lj++] = 0;
4861 #if 0
4862         if (loglevel > 0) {
4863             page_dump(logfile);
4865 #endif
         /* Export the conditional-branch targets for gen_pc_load. */
4866         gen_opc_jump_pc[0] = dc->jump_pc[0];
4867         gen_opc_jump_pc[1] = dc->jump_pc[1];
4868     } else {
4869         tb->size = last_pc + 4 - pc_start;
4870         tb->icount = num_insns;
4872 #ifdef DEBUG_DISAS
     /* Optional logging of the guest assembly that was translated. */
4873     if (loglevel & CPU_LOG_TB_IN_ASM) {
4874         fprintf(logfile, "--------------\n");
4875         fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4876         target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4877         fprintf(logfile, "\n");
4879 #endif
/* Public entry point: translate a TB normally (spc == 0, no PC search). */
4882 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4884     gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: re-translate a TB in search-PC mode (spc == 1),
   filling the gen_opc_* tables used to recover the guest PC after a
   fault inside the TB. */
4887 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4889     gen_intermediate_code_internal(tb, 1, env);
/* One-time translator initialization: register every CPUState field the
   translator accesses as a named TCG global (memory-backed except for the
   env base pointer itself), and register the helper functions.  Guarded by
   a static flag so repeated calls are no-ops. */
4892 void gen_intermediate_code_init(CPUSPARCState *env)
4894     unsigned int i;
4895     static int inited;
     /* Display names for the global registers; index 0 is %g0, which is
        hardwired to zero and never registered as a TCG global. */
4896     static const char * const gregnames[8] = {
4897         NULL, // g0 not used
4898         "g1",
4899         "g2",
4900         "g3",
4901         "g4",
4902         "g5",
4903         "g6",
4904         "g7",
     /* Display names for the FPU registers (64 singles on SPARC64). */
4906     static const char * const fregnames[64] = {
4907         "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4908         "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4909         "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4910         "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4911         "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4912         "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4913         "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4914         "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4917     /* init various static tables */
4918     if (!inited) {
4919         inited = 1;
         /* env lives in the fixed host register TCG_AREG0; everything else
            is addressed relative to it. */
4921         cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4922         cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4923                                          offsetof(CPUState, regwptr),
4924                                          "regwptr");
4925 #ifdef TARGET_SPARC64
         /* SPARC64-only state: extended condition codes, ASI, FPRS, GSR,
            the tick comparators, and hypervisor registers. */
4926         cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4927                                      TCG_AREG0, offsetof(CPUState, xcc),
4928                                      "xcc");
4929         cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
4930                                      TCG_AREG0, offsetof(CPUState, asi),
4931                                      "asi");
4932         cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
4933                                       TCG_AREG0, offsetof(CPUState, fprs),
4934                                       "fprs");
4935         cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
4936                                      TCG_AREG0, offsetof(CPUState, gsr),
4937                                      "gsr");
4938         cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4939                                            TCG_AREG0,
4940                                            offsetof(CPUState, tick_cmpr),
4941                                            "tick_cmpr");
4942         cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4943                                             TCG_AREG0,
4944                                             offsetof(CPUState, stick_cmpr),
4945                                             "stick_cmpr");
4946         cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4947                                              TCG_AREG0,
4948                                              offsetof(CPUState, hstick_cmpr),
4949                                              "hstick_cmpr");
4950         cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4951                                        offsetof(CPUState, hintp),
4952                                        "hintp");
4953         cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4954                                       offsetof(CPUState, htba),
4955                                       "htba");
4956         cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4957                                       offsetof(CPUState, hver),
4958                                       "hver");
4959         cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4960                                      offsetof(CPUState, ssr), "ssr");
4961         cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4962                                      offsetof(CPUState, version), "ver");
4963 #else
         /* 32-bit SPARC: window invalid mask. */
4964         cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
4965                                      TCG_AREG0, offsetof(CPUState, wim),
4966                                      "wim");
4967 #endif
         /* Common state shared by all SPARC variants: condition scratch,
            lazy cc sources/dest, PSR, FSR, pc/npc and the Y register. */
4968         cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
4969                                       TCG_AREG0, offsetof(CPUState, cond),
4970                                       "cond");
4971         cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4972                                         TCG_AREG0, offsetof(CPUState, cc_src),
4973                                         "cc_src");
4974         cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4975                                          offsetof(CPUState, cc_src2),
4976                                          "cc_src2");
4977         cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4978                                         TCG_AREG0, offsetof(CPUState, cc_dst),
4979                                         "cc_dst");
4980         cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4981                                      TCG_AREG0, offsetof(CPUState, psr),
4982                                      "psr");
4983         cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
4984                                      TCG_AREG0, offsetof(CPUState, fsr),
4985                                      "fsr");
4986         cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
4987                                     TCG_AREG0, offsetof(CPUState, pc),
4988                                     "pc");
4989         cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
4990                                      TCG_AREG0, offsetof(CPUState, npc),
4991                                      "npc");
4992         cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
4993                                    TCG_AREG0, offsetof(CPUState, y), "y");
4994 #ifndef CONFIG_USER_ONLY
4995         cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
4996                                      TCG_AREG0, offsetof(CPUState, tbr),
4997                                      "tbr");
4998 #endif
         /* Register %g1..%g7 (skipping the hardwired %g0) and all FP regs. */
4999         for (i = 1; i < 8; i++)
5000             cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
5001                                               offsetof(CPUState, gregs[i]),
5002                                               gregnames[i]);
5003         for (i = 0; i < TARGET_FPREGS; i++)
5004             cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
5005                                             offsetof(CPUState, fpr[i]),
5006                                             fregnames[i]);
5008         /* register helpers */
     /* Re-including helper.h with DEF_HELPER redefined turns each helper
        declaration into a tcg_register_helper() call. */
5010 #undef DEF_HELPER
5011 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5012 #include "helper.h"
5016 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5017 unsigned long searched_pc, int pc_pos, void *puc)
5019 target_ulong npc;
5020 env->pc = gen_opc_pc[pc_pos];
5021 npc = gen_opc_npc[pc_pos];
5022 if (npc == 1) {
5023 /* dynamic NPC: already stored */
5024 } else if (npc == 2) {
5025 target_ulong t2 = (target_ulong)(unsigned long)puc;
5026 /* jump PC: use T2 and the jump targets of the translation */
5027 if (t2)
5028 env->npc = gen_opc_jump_pc[0];
5029 else
5030 env->npc = gen_opc_jump_pc[1];
5031 } else {
5032 env->npc = npc;