Use the new concat_tl_i64 op for std and stda
[qemu/mini2440.git] / target-sparc / translate.c
blob96dd56faa62e34401ca810fc1cd255bd8f97d739
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define DEBUG_DISAS
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
61 #include "gen-icount.h"
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
67 int is_br;
68 int mem_idx;
69 int fpu_enabled;
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
72 sparc_def_t *def;
73 } DisasContext;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
86 #ifdef TARGET_SPARC64
87 #define FFPREG(r) (r)
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
90 #else
91 #define FFPREG(r) (r)
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
94 #endif
/*
 * Sign-extend the low LEN bits of X into a full int.
 * E.g. sign_extend(0x1f, 5) == -1, sign_extend(0x0f, 5) == 15.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    /* Perform the left shift in the unsigned domain: left-shifting a
       negative signed int is undefined behavior in C.  The final
       arithmetic right shift of a negative value is implementation-
       defined but behaves as expected on all compilers QEMU supports. */
    return (int32_t)((uint32_t)x << len) >> len;
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_DT0(unsigned int src)
107 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
108 offsetof(CPU_DoubleU, l.upper));
109 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
110 offsetof(CPU_DoubleU, l.lower));
113 static void gen_op_load_fpr_DT1(unsigned int src)
115 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
116 offsetof(CPU_DoubleU, l.upper));
117 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
118 offsetof(CPU_DoubleU, l.lower));
121 static void gen_op_store_DT0_fpr(unsigned int dst)
123 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
124 offsetof(CPU_DoubleU, l.upper));
125 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
126 offsetof(CPU_DoubleU, l.lower));
129 static void gen_op_load_fpr_QT0(unsigned int src)
131 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
132 offsetof(CPU_QuadU, l.upmost));
133 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
134 offsetof(CPU_QuadU, l.upper));
135 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
136 offsetof(CPU_QuadU, l.lower));
137 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
138 offsetof(CPU_QuadU, l.lowest));
141 static void gen_op_load_fpr_QT1(unsigned int src)
143 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
144 offsetof(CPU_QuadU, l.upmost));
145 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
146 offsetof(CPU_QuadU, l.upper));
147 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
148 offsetof(CPU_QuadU, l.lower));
149 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
150 offsetof(CPU_QuadU, l.lowest));
153 static void gen_op_store_QT0_fpr(unsigned int dst)
155 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
156 offsetof(CPU_QuadU, l.upmost));
157 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
158 offsetof(CPU_QuadU, l.upper));
159 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
160 offsetof(CPU_QuadU, l.lower));
161 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
162 offsetof(CPU_QuadU, l.lowest));
165 /* moves */
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
170 #endif
171 #else
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
175 #else
176 #endif
177 #endif
179 #ifdef TARGET_SPARC64
180 #ifndef TARGET_ABI32
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
182 #else
183 #define AM_CHECK(dc) (1)
184 #endif
185 #endif
187 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
189 #ifdef TARGET_SPARC64
190 if (AM_CHECK(dc))
191 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
192 #endif
195 static inline void gen_movl_reg_TN(int reg, TCGv tn)
197 if (reg == 0)
198 tcg_gen_movi_tl(tn, 0);
199 else if (reg < 8)
200 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
201 else {
202 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
206 static inline void gen_movl_TN_reg(int reg, TCGv tn)
208 if (reg == 0)
209 return;
210 else if (reg < 8)
211 tcg_gen_mov_tl(cpu_gregs[reg], tn);
212 else {
213 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
217 static inline void gen_goto_tb(DisasContext *s, int tb_num,
218 target_ulong pc, target_ulong npc)
220 TranslationBlock *tb;
222 tb = s->tb;
223 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
224 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num);
227 tcg_gen_movi_tl(cpu_pc, pc);
228 tcg_gen_movi_tl(cpu_npc, npc);
229 tcg_gen_exit_tb((long)tb + tb_num);
230 } else {
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc, pc);
233 tcg_gen_movi_tl(cpu_npc, npc);
234 tcg_gen_exit_tb(0);
238 // XXX suboptimal
239 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
241 tcg_gen_extu_i32_tl(reg, src);
242 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
243 tcg_gen_andi_tl(reg, reg, 0x1);
246 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
248 tcg_gen_extu_i32_tl(reg, src);
249 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
250 tcg_gen_andi_tl(reg, reg, 0x1);
253 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
255 tcg_gen_extu_i32_tl(reg, src);
256 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
257 tcg_gen_andi_tl(reg, reg, 0x1);
260 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
262 tcg_gen_extu_i32_tl(reg, src);
263 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
264 tcg_gen_andi_tl(reg, reg, 0x1);
267 static inline void gen_cc_clear_icc(void)
269 tcg_gen_movi_i32(cpu_psr, 0);
272 #ifdef TARGET_SPARC64
273 static inline void gen_cc_clear_xcc(void)
275 tcg_gen_movi_i32(cpu_xcc, 0);
277 #endif
279 /* old op:
280 if (!T0)
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
283 env->psr |= PSR_NEG;
285 static inline void gen_cc_NZ_icc(TCGv dst)
287 TCGv r_temp;
288 int l1, l2;
290 l1 = gen_new_label();
291 l2 = gen_new_label();
292 r_temp = tcg_temp_new(TCG_TYPE_TL);
293 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
294 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
295 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
296 gen_set_label(l1);
297 tcg_gen_ext_i32_tl(r_temp, dst);
298 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
299 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
300 gen_set_label(l2);
301 tcg_temp_free(r_temp);
304 #ifdef TARGET_SPARC64
305 static inline void gen_cc_NZ_xcc(TCGv dst)
307 int l1, l2;
309 l1 = gen_new_label();
310 l2 = gen_new_label();
311 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
312 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
313 gen_set_label(l1);
314 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
315 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
316 gen_set_label(l2);
318 #endif
320 /* old op:
321 if (T0 < src1)
322 env->psr |= PSR_CARRY;
324 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
326 TCGv r_temp1, r_temp2;
327 int l1;
329 l1 = gen_new_label();
330 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
331 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
332 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
333 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
334 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
335 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
336 gen_set_label(l1);
337 tcg_temp_free(r_temp1);
338 tcg_temp_free(r_temp2);
341 #ifdef TARGET_SPARC64
342 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
344 int l1;
346 l1 = gen_new_label();
347 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
348 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
349 gen_set_label(l1);
351 #endif
353 /* old op:
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
355 env->psr |= PSR_OVF;
357 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
359 TCGv r_temp;
361 r_temp = tcg_temp_new(TCG_TYPE_TL);
362 tcg_gen_xor_tl(r_temp, src1, src2);
363 tcg_gen_xori_tl(r_temp, r_temp, -1);
364 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
365 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
366 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
367 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
368 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
369 tcg_temp_free(r_temp);
370 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
373 #ifdef TARGET_SPARC64
374 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
376 TCGv r_temp;
378 r_temp = tcg_temp_new(TCG_TYPE_TL);
379 tcg_gen_xor_tl(r_temp, src1, src2);
380 tcg_gen_xori_tl(r_temp, r_temp, -1);
381 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
382 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
383 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
384 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
385 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
386 tcg_temp_free(r_temp);
387 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
389 #endif
391 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
393 TCGv r_temp, r_const;
394 int l1;
396 l1 = gen_new_label();
398 r_temp = tcg_temp_new(TCG_TYPE_TL);
399 tcg_gen_xor_tl(r_temp, src1, src2);
400 tcg_gen_xori_tl(r_temp, r_temp, -1);
401 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
402 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
403 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
404 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
405 r_const = tcg_const_i32(TT_TOVF);
406 tcg_gen_helper_0_1(raise_exception, r_const);
407 tcg_temp_free(r_const);
408 gen_set_label(l1);
409 tcg_temp_free(r_temp);
412 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
414 int l1;
416 l1 = gen_new_label();
417 tcg_gen_or_tl(cpu_tmp0, src1, src2);
418 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
419 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
420 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
421 gen_set_label(l1);
424 static inline void gen_tag_tv(TCGv src1, TCGv src2)
426 int l1;
427 TCGv r_const;
429 l1 = gen_new_label();
430 tcg_gen_or_tl(cpu_tmp0, src1, src2);
431 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
432 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
433 r_const = tcg_const_i32(TT_TOVF);
434 tcg_gen_helper_0_1(raise_exception, r_const);
435 tcg_temp_free(r_const);
436 gen_set_label(l1);
439 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
441 tcg_gen_mov_tl(cpu_cc_src, src1);
442 tcg_gen_mov_tl(cpu_cc_src2, src2);
443 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
444 gen_cc_clear_icc();
445 gen_cc_NZ_icc(cpu_cc_dst);
446 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
447 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
448 #ifdef TARGET_SPARC64
449 gen_cc_clear_xcc();
450 gen_cc_NZ_xcc(cpu_cc_dst);
451 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
452 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
453 #endif
454 tcg_gen_mov_tl(dst, cpu_cc_dst);
457 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
459 tcg_gen_mov_tl(cpu_cc_src, src1);
460 tcg_gen_mov_tl(cpu_cc_src2, src2);
461 gen_mov_reg_C(cpu_tmp0, cpu_psr);
462 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
463 gen_cc_clear_icc();
464 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
465 #ifdef TARGET_SPARC64
466 gen_cc_clear_xcc();
467 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
468 #endif
469 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
470 gen_cc_NZ_icc(cpu_cc_dst);
471 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
472 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
473 #ifdef TARGET_SPARC64
474 gen_cc_NZ_xcc(cpu_cc_dst);
475 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
476 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477 #endif
478 tcg_gen_mov_tl(dst, cpu_cc_dst);
481 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
483 tcg_gen_mov_tl(cpu_cc_src, src1);
484 tcg_gen_mov_tl(cpu_cc_src2, src2);
485 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
486 gen_cc_clear_icc();
487 gen_cc_NZ_icc(cpu_cc_dst);
488 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
489 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
490 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
491 #ifdef TARGET_SPARC64
492 gen_cc_clear_xcc();
493 gen_cc_NZ_xcc(cpu_cc_dst);
494 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
495 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
496 #endif
497 tcg_gen_mov_tl(dst, cpu_cc_dst);
500 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
502 tcg_gen_mov_tl(cpu_cc_src, src1);
503 tcg_gen_mov_tl(cpu_cc_src2, src2);
504 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
505 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
506 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
507 gen_cc_clear_icc();
508 gen_cc_NZ_icc(cpu_cc_dst);
509 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
510 #ifdef TARGET_SPARC64
511 gen_cc_clear_xcc();
512 gen_cc_NZ_xcc(cpu_cc_dst);
513 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
514 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
515 #endif
516 tcg_gen_mov_tl(dst, cpu_cc_dst);
519 /* old op:
520 if (src1 < T1)
521 env->psr |= PSR_CARRY;
523 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
525 TCGv r_temp1, r_temp2;
526 int l1;
528 l1 = gen_new_label();
529 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
530 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
531 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
532 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
533 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
534 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
535 gen_set_label(l1);
536 tcg_temp_free(r_temp1);
537 tcg_temp_free(r_temp2);
540 #ifdef TARGET_SPARC64
541 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
543 int l1;
545 l1 = gen_new_label();
546 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
547 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
548 gen_set_label(l1);
550 #endif
552 /* old op:
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
554 env->psr |= PSR_OVF;
556 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
558 TCGv r_temp;
560 r_temp = tcg_temp_new(TCG_TYPE_TL);
561 tcg_gen_xor_tl(r_temp, src1, src2);
562 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
563 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
564 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
565 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
566 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
567 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
568 tcg_temp_free(r_temp);
571 #ifdef TARGET_SPARC64
572 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
574 TCGv r_temp;
576 r_temp = tcg_temp_new(TCG_TYPE_TL);
577 tcg_gen_xor_tl(r_temp, src1, src2);
578 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
579 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
580 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
581 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
582 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
583 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
584 tcg_temp_free(r_temp);
586 #endif
588 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
590 TCGv r_temp, r_const;
591 int l1;
593 l1 = gen_new_label();
595 r_temp = tcg_temp_new(TCG_TYPE_TL);
596 tcg_gen_xor_tl(r_temp, src1, src2);
597 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
598 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
599 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
600 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
601 r_const = tcg_const_i32(TT_TOVF);
602 tcg_gen_helper_0_1(raise_exception, r_const);
603 tcg_temp_free(r_const);
604 gen_set_label(l1);
605 tcg_temp_free(r_temp);
608 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
610 tcg_gen_mov_tl(cpu_cc_src, src1);
611 tcg_gen_mov_tl(cpu_cc_src2, src2);
612 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
613 gen_cc_clear_icc();
614 gen_cc_NZ_icc(cpu_cc_dst);
615 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
616 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
617 #ifdef TARGET_SPARC64
618 gen_cc_clear_xcc();
619 gen_cc_NZ_xcc(cpu_cc_dst);
620 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
621 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
622 #endif
623 tcg_gen_mov_tl(dst, cpu_cc_dst);
626 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
628 tcg_gen_mov_tl(cpu_cc_src, src1);
629 tcg_gen_mov_tl(cpu_cc_src2, src2);
630 gen_mov_reg_C(cpu_tmp0, cpu_psr);
631 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
632 gen_cc_clear_icc();
633 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
634 #ifdef TARGET_SPARC64
635 gen_cc_clear_xcc();
636 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
637 #endif
638 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
639 gen_cc_NZ_icc(cpu_cc_dst);
640 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
641 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
642 #ifdef TARGET_SPARC64
643 gen_cc_NZ_xcc(cpu_cc_dst);
644 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
645 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
646 #endif
647 tcg_gen_mov_tl(dst, cpu_cc_dst);
650 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
652 tcg_gen_mov_tl(cpu_cc_src, src1);
653 tcg_gen_mov_tl(cpu_cc_src2, src2);
654 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
655 gen_cc_clear_icc();
656 gen_cc_NZ_icc(cpu_cc_dst);
657 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
658 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
660 #ifdef TARGET_SPARC64
661 gen_cc_clear_xcc();
662 gen_cc_NZ_xcc(cpu_cc_dst);
663 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
664 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
665 #endif
666 tcg_gen_mov_tl(dst, cpu_cc_dst);
669 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
671 tcg_gen_mov_tl(cpu_cc_src, src1);
672 tcg_gen_mov_tl(cpu_cc_src2, src2);
673 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
674 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
675 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
676 gen_cc_clear_icc();
677 gen_cc_NZ_icc(cpu_cc_dst);
678 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
679 #ifdef TARGET_SPARC64
680 gen_cc_clear_xcc();
681 gen_cc_NZ_xcc(cpu_cc_dst);
682 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
683 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
684 #endif
685 tcg_gen_mov_tl(dst, cpu_cc_dst);
688 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
690 TCGv r_temp;
691 int l1;
693 l1 = gen_new_label();
694 r_temp = tcg_temp_new(TCG_TYPE_TL);
696 /* old op:
697 if (!(env->y & 1))
698 T1 = 0;
700 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
701 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
702 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
703 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
704 tcg_gen_movi_tl(cpu_cc_src2, 0);
705 gen_set_label(l1);
707 // b2 = T0 & 1;
708 // env->y = (b2 << 31) | (env->y >> 1);
709 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
710 tcg_gen_shli_tl(r_temp, r_temp, 31);
711 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
712 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
713 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
714 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
716 // b1 = N ^ V;
717 gen_mov_reg_N(cpu_tmp0, cpu_psr);
718 gen_mov_reg_V(r_temp, cpu_psr);
719 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
720 tcg_temp_free(r_temp);
722 // T0 = (b1 << 31) | (T0 >> 1);
723 // src1 = T0;
724 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
725 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
726 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
728 /* do addition and update flags */
729 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
731 gen_cc_clear_icc();
732 gen_cc_NZ_icc(cpu_cc_dst);
733 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
734 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
735 tcg_gen_mov_tl(dst, cpu_cc_dst);
738 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
740 TCGv r_temp, r_temp2;
742 r_temp = tcg_temp_new(TCG_TYPE_I64);
743 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
745 tcg_gen_extu_i32_i64(r_temp, src2);
746 tcg_gen_extu_i32_i64(r_temp2, src1);
747 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
749 tcg_gen_shri_i64(r_temp, r_temp2, 32);
750 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
751 tcg_temp_free(r_temp);
752 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
753 #ifdef TARGET_SPARC64
754 tcg_gen_mov_i64(dst, r_temp2);
755 #else
756 tcg_gen_trunc_i64_tl(dst, r_temp2);
757 #endif
758 tcg_temp_free(r_temp2);
761 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
763 TCGv r_temp, r_temp2;
765 r_temp = tcg_temp_new(TCG_TYPE_I64);
766 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
768 tcg_gen_ext_i32_i64(r_temp, src2);
769 tcg_gen_ext_i32_i64(r_temp2, src1);
770 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
772 tcg_gen_shri_i64(r_temp, r_temp2, 32);
773 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
774 tcg_temp_free(r_temp);
775 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
776 #ifdef TARGET_SPARC64
777 tcg_gen_mov_i64(dst, r_temp2);
778 #else
779 tcg_gen_trunc_i64_tl(dst, r_temp2);
780 #endif
781 tcg_temp_free(r_temp2);
#ifdef TARGET_SPARC64
/* Emit a TT_DIV_ZERO trap when DIVISOR is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv r_const;
    int lbl_nonzero;

    lbl_nonzero = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, lbl_nonzero);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(lbl_nonzero);
}

/* SDIVX: 64-bit signed division.  Traps on divide-by-zero and pins the
   single overflowing case, INT64_MIN / -1, to INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int lbl_do_div, lbl_done;

    lbl_do_div = gen_new_label();
    lbl_done = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    /* INT64_MIN / -1 would overflow the host division as well. */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, lbl_do_div);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, lbl_do_div);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(lbl_done);
    gen_set_label(lbl_do_div);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(lbl_done);
}
#endif
817 static inline void gen_op_div_cc(TCGv dst)
819 int l1;
821 tcg_gen_mov_tl(cpu_cc_dst, dst);
822 gen_cc_clear_icc();
823 gen_cc_NZ_icc(cpu_cc_dst);
824 l1 = gen_new_label();
825 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
826 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
827 gen_set_label(l1);
830 static inline void gen_op_logic_cc(TCGv dst)
832 tcg_gen_mov_tl(cpu_cc_dst, dst);
834 gen_cc_clear_icc();
835 gen_cc_NZ_icc(cpu_cc_dst);
836 #ifdef TARGET_SPARC64
837 gen_cc_clear_xcc();
838 gen_cc_NZ_xcc(cpu_cc_dst);
839 #endif
842 // 1
843 static inline void gen_op_eval_ba(TCGv dst)
845 tcg_gen_movi_tl(dst, 1);
848 // Z
849 static inline void gen_op_eval_be(TCGv dst, TCGv src)
851 gen_mov_reg_Z(dst, src);
854 // Z | (N ^ V)
855 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
857 gen_mov_reg_N(cpu_tmp0, src);
858 gen_mov_reg_V(dst, src);
859 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
860 gen_mov_reg_Z(cpu_tmp0, src);
861 tcg_gen_or_tl(dst, dst, cpu_tmp0);
864 // N ^ V
865 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
867 gen_mov_reg_V(cpu_tmp0, src);
868 gen_mov_reg_N(dst, src);
869 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
872 // C | Z
873 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
875 gen_mov_reg_Z(cpu_tmp0, src);
876 gen_mov_reg_C(dst, src);
877 tcg_gen_or_tl(dst, dst, cpu_tmp0);
880 // C
881 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
883 gen_mov_reg_C(dst, src);
886 // V
887 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
889 gen_mov_reg_V(dst, src);
892 // 0
893 static inline void gen_op_eval_bn(TCGv dst)
895 tcg_gen_movi_tl(dst, 0);
898 // N
899 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
901 gen_mov_reg_N(dst, src);
904 // !Z
905 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
907 gen_mov_reg_Z(dst, src);
908 tcg_gen_xori_tl(dst, dst, 0x1);
911 // !(Z | (N ^ V))
912 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
914 gen_mov_reg_N(cpu_tmp0, src);
915 gen_mov_reg_V(dst, src);
916 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
917 gen_mov_reg_Z(cpu_tmp0, src);
918 tcg_gen_or_tl(dst, dst, cpu_tmp0);
919 tcg_gen_xori_tl(dst, dst, 0x1);
922 // !(N ^ V)
923 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
925 gen_mov_reg_V(cpu_tmp0, src);
926 gen_mov_reg_N(dst, src);
927 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
928 tcg_gen_xori_tl(dst, dst, 0x1);
931 // !(C | Z)
932 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
934 gen_mov_reg_Z(cpu_tmp0, src);
935 gen_mov_reg_C(dst, src);
936 tcg_gen_or_tl(dst, dst, cpu_tmp0);
937 tcg_gen_xori_tl(dst, dst, 0x1);
940 // !C
941 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
943 gen_mov_reg_C(dst, src);
944 tcg_gen_xori_tl(dst, dst, 0x1);
947 // !N
948 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
950 gen_mov_reg_N(dst, src);
951 tcg_gen_xori_tl(dst, dst, 0x1);
954 // !V
955 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
957 gen_mov_reg_V(dst, src);
958 tcg_gen_xori_tl(dst, dst, 0x1);
962 FPSR bit field FCC1 | FCC0:
966 3 unordered
968 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
969 unsigned int fcc_offset)
971 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
972 tcg_gen_andi_tl(reg, reg, 0x1);
975 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
976 unsigned int fcc_offset)
978 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
979 tcg_gen_andi_tl(reg, reg, 0x1);
982 // !0: FCC0 | FCC1
983 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
984 unsigned int fcc_offset)
986 gen_mov_reg_FCC0(dst, src, fcc_offset);
987 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
988 tcg_gen_or_tl(dst, dst, cpu_tmp0);
991 // 1 or 2: FCC0 ^ FCC1
992 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
993 unsigned int fcc_offset)
995 gen_mov_reg_FCC0(dst, src, fcc_offset);
996 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
997 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1000 // 1 or 3: FCC0
1001 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1002 unsigned int fcc_offset)
1004 gen_mov_reg_FCC0(dst, src, fcc_offset);
1007 // 1: FCC0 & !FCC1
1008 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1009 unsigned int fcc_offset)
1011 gen_mov_reg_FCC0(dst, src, fcc_offset);
1012 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1013 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1014 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017 // 2 or 3: FCC1
1018 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1019 unsigned int fcc_offset)
1021 gen_mov_reg_FCC1(dst, src, fcc_offset);
1024 // 2: !FCC0 & FCC1
1025 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1026 unsigned int fcc_offset)
1028 gen_mov_reg_FCC0(dst, src, fcc_offset);
1029 tcg_gen_xori_tl(dst, dst, 0x1);
1030 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1031 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1034 // 3: FCC0 & FCC1
1035 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1036 unsigned int fcc_offset)
1038 gen_mov_reg_FCC0(dst, src, fcc_offset);
1039 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1040 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1043 // 0: !(FCC0 | FCC1)
1044 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1045 unsigned int fcc_offset)
1047 gen_mov_reg_FCC0(dst, src, fcc_offset);
1048 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1049 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1050 tcg_gen_xori_tl(dst, dst, 0x1);
1053 // 0 or 3: !(FCC0 ^ FCC1)
1054 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1055 unsigned int fcc_offset)
1057 gen_mov_reg_FCC0(dst, src, fcc_offset);
1058 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1059 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1060 tcg_gen_xori_tl(dst, dst, 0x1);
1063 // 0 or 2: !FCC0
1064 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1065 unsigned int fcc_offset)
1067 gen_mov_reg_FCC0(dst, src, fcc_offset);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1071 // !1: !(FCC0 & !FCC1)
1072 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1073 unsigned int fcc_offset)
1075 gen_mov_reg_FCC0(dst, src, fcc_offset);
1076 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1077 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1078 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1079 tcg_gen_xori_tl(dst, dst, 0x1);
1082 // 0 or 1: !FCC1
1083 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1084 unsigned int fcc_offset)
1086 gen_mov_reg_FCC1(dst, src, fcc_offset);
1087 tcg_gen_xori_tl(dst, dst, 0x1);
1090 // !2: !(!FCC0 & FCC1)
1091 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1092 unsigned int fcc_offset)
1094 gen_mov_reg_FCC0(dst, src, fcc_offset);
1095 tcg_gen_xori_tl(dst, dst, 0x1);
1096 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1097 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1098 tcg_gen_xori_tl(dst, dst, 0x1);
1101 // !3: !(FCC0 & FCC1)
1102 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1103 unsigned int fcc_offset)
1105 gen_mov_reg_FCC0(dst, src, fcc_offset);
1106 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1107 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1111 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1112 target_ulong pc2, TCGv r_cond)
1114 int l1;
1116 l1 = gen_new_label();
1118 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1120 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1122 gen_set_label(l1);
1123 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1126 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1127 target_ulong pc2, TCGv r_cond)
1129 int l1;
1131 l1 = gen_new_label();
1133 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1135 gen_goto_tb(dc, 0, pc2, pc1);
1137 gen_set_label(l1);
1138 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a pending conditional npc: set cpu_npc to npc1 when
   r_cond is non-zero, npc2 otherwise.  Used to flush a JUMP_PC state
   into an architectural npc value. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
/* Resolves a deferred JUMP_PC npc into cpu_npc so that `cond` may be
   reused; afterwards npc is only known dynamically. */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
/* Make cpu_npc hold the architectural npc: resolve a pending JUMP_PC
   via the condition, or store the static npc.  A DYNAMIC_PC npc is
   already up to date and needs nothing. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
/* Flush the static translation-time pc/npc into the CPU state before
   any operation that may raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
/* pc = npc, for delayed control transfers.  When npc is conditional
   (JUMP_PC) or dynamic, the copy must happen in generated code and pc
   becomes dynamic too; otherwise it is a translation-time assignment. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
/* Advance in generated code: pc = npc, npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
/* Evaluate an integer condition code (SPARC cond field 0x0-0xf) into
   r_dst (0 or 1).  cc selects the flag source: on SPARC64, cc != 0
   means the 64-bit xcc flags, otherwise the 32-bit psr/icc flags. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate a floating-point condition (fcond field 0x0-0xf) on the
   selected %fccN field of the FSR into r_dst (0 or 1).  The offset is
   the bit distance from fcc0 (FSR bits 11:10) to the requested field:
   fcc1 is at bit 32, fcc2 at 34, fcc3 at 36. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1341 #ifdef TARGET_SPARC64
1342 // Inverted logic
1343 static const int gen_tcg_cond_reg[8] = {
1345 TCG_COND_NE,
1346 TCG_COND_GT,
1347 TCG_COND_GE,
1349 TCG_COND_EQ,
1350 TCG_COND_LE,
1351 TCG_COND_LT,
/* r_dst = 1 if r_src satisfies the register condition `cond`, else 0.
   The table holds the inverse condition, so a successful brcond skips
   the "set to 1" store. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
1364 #endif
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  `a` is the
   annul bit; offset is the sign-extended, shifted displacement.
   Unconditional cases are resolved at translation time; real
   conditions either end the TB (annulled) or defer the npc choice via
   JUMP_PC. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: jump directly, delay slot skipped */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* npc depends on the condition: record both candidates */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch but the condition comes from FSR.fccN. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1446 #ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-contents (BPr).  No unconditional
   encodings exist here, so the condition is always evaluated. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
/* Per-%fcc helper tables for the double/quad FP compare instructions,
   indexed by the target condition-code field (0-3). */
static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};

/* "e" variants signal an exception on unordered operands. */
static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
/* Single-precision FP compare, result written to %fccN (fccno 0-3).
   Dispatches to the matching helper by explicit switch since the
   operands are passed as TCG values. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
        break;
    }
}
/* Double-precision FP compare into %fccN; operands come from DT0/DT1. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}
/* Quad-precision FP compare into %fccN; operands come from QT0/QT1. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
/* Single-precision compare that traps on unordered operands (FCMPEs),
   result in %fccN. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
        break;
    }
}
/* Double-precision trapping compare into %fccN (operands in DT0/DT1). */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}
/* Quad-precision trapping compare into %fccN (operands in QT0/QT1). */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
1551 #else
/* SPARC32 has a single fcc field, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
}
/* SPARC32 double compare; fccno ignored, operands in DT0/DT1. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}
/* SPARC32 quad compare; fccno ignored, operands in QT0/QT1. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
/* SPARC32 trapping single compare; fccno ignored. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
}
/* SPARC32 trapping double compare; fccno ignored. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}
/* SPARC32 trapping quad compare; fccno ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
1582 #endif
/* Record fsr_flags in FSR.ftt (after clearing the old trap type) and
   raise a TT_FP_EXCP exception in generated code. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
}
/* If the FPU is disabled (system emulation only), raise TT_NFPU_INSN
   and return 1 so the caller abandons the instruction; otherwise
   return 0.  User-mode emulation always treats the FPU as enabled. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv r_const;

        /* pc/npc must be architectural before raising the trap */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
/* Clear FSR.ftt and the current IEEE exception bits (cexc). */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
/* Reset the softfloat accrued-exception state before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1622 /* asi moves */
1623 #ifdef TARGET_SPARC64
/* Return a TCG value holding the ASI for this instruction: the %asi
   register for the immediate (i=1) form, or the 8-bit ASI field from
   the instruction otherwise.  Caller frees the returned temp. */
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
/* ASI load of `size` bytes from addr into dst; sign controls
   sign-extension in the helper. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* ASI store of `size` bytes of src to addr. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* ASI load into FP register rd; the helper writes the FP register
   file directly. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* ASI store from FP register rd; the helper reads the FP register
   file directly. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* SWAPA: atomically (from the guest's view) exchange the 32-bit word
   at addr with dst — load old value, store dst, return old value in
   dst. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* LDDA (SPARC64): the helper performs the 64-bit load and writes the
   register pair rd/rd+1 itself. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_asi);
}
/* STDA (SPARC64): build the 64-bit store value with concat_tl_i64 —
   low word from register rd+1, high word from hi (register rd) — and
   issue a single 8-byte ASI store. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* CASA: 32-bit compare-and-swap of [addr] against register rd, with
   swap value val2; the old memory value lands in dst. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val1);
}
/* CASXA: 64-bit compare-and-swap, comparand taken from register rd. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free(r_asi);
}
1753 #elif !defined(CONFIG_USER_ONLY)
/* SPARC32 system-mode ASI load: no immediate-ASI form, so the ASI is
   always the instruction field.  The helper returns a 64-bit value
   which is truncated to target width. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* SPARC32 system-mode ASI store; the helper takes a 64-bit value, so
   src is zero-extended first. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
/* SPARC32 SWAPA: load old 32-bit word, store dst, return old word in
   dst. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* SPARC32 LDDA: one 64-bit ASI load, then split the result — low
   32 bits into register rd+1, high 32 bits into rd (via hi). */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
/* SPARC32 STDA: combine the rd/rd+1 pair with concat_tl_i64 (low word
   from rd+1, high word from hi) into one 8-byte ASI store. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1827 #endif
1829 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at addr into dst, then store 0xff to the
   same location. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_val, r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val);
}
1844 #endif
1846 static inline TCGv get_src1(unsigned int insn, TCGv def)
1848 TCGv r_rs1 = def;
1849 unsigned int rs1;
1851 rs1 = GET_FIELD(insn, 13, 17);
1852 if (rs1 == 0)
1853 r_rs1 = tcg_const_tl(0); // XXX how to free?
1854 else if (rs1 < 8)
1855 r_rs1 = cpu_gregs[rs1];
1856 else
1857 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1858 return r_rs1;
1861 static inline TCGv get_src2(unsigned int insn, TCGv def)
1863 TCGv r_rs2 = def;
1864 unsigned int rs2;
1866 if (IS_IMM) { /* immediate */
1867 rs2 = GET_FIELDs(insn, 19, 31);
1868 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1869 } else { /* register */
1870 rs2 = GET_FIELD(insn, 27, 31);
1871 if (rs2 == 0)
1872 r_rs2 = tcg_const_tl(0); // XXX how to free?
1873 else if (rs2 < 8)
1874 r_rs2 = cpu_gregs[rs2];
1875 else
1876 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1878 return r_rs2;
/* Bail out to the illegal_insn / nfpu_insn labels inside
   disas_sparc_insn when the CPU model lacks the given feature bit. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1888 /* before an instruction, dc->pc must be static */
1889 static void disas_sparc_insn(DisasContext * dc)
1891 unsigned int insn, opc, rs1, rs2, rd;
1893 if (unlikely(loglevel & CPU_LOG_TB_OP))
1894 tcg_gen_debug_insn_start(dc->pc);
1895 insn = ldl_code(dc->pc);
1896 opc = GET_FIELD(insn, 0, 1);
1898 rd = GET_FIELD(insn, 2, 6);
1900 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1901 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1903 switch (opc) {
1904 case 0: /* branches/sethi */
1906 unsigned int xop = GET_FIELD(insn, 7, 9);
1907 int32_t target;
1908 switch (xop) {
1909 #ifdef TARGET_SPARC64
1910 case 0x1: /* V9 BPcc */
1912 int cc;
1914 target = GET_FIELD_SP(insn, 0, 18);
1915 target = sign_extend(target, 18);
1916 target <<= 2;
1917 cc = GET_FIELD_SP(insn, 20, 21);
1918 if (cc == 0)
1919 do_branch(dc, target, insn, 0, cpu_cond);
1920 else if (cc == 2)
1921 do_branch(dc, target, insn, 1, cpu_cond);
1922 else
1923 goto illegal_insn;
1924 goto jmp_insn;
1926 case 0x3: /* V9 BPr */
1928 target = GET_FIELD_SP(insn, 0, 13) |
1929 (GET_FIELD_SP(insn, 20, 21) << 14);
1930 target = sign_extend(target, 16);
1931 target <<= 2;
1932 cpu_src1 = get_src1(insn, cpu_src1);
1933 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1934 goto jmp_insn;
1936 case 0x5: /* V9 FBPcc */
1938 int cc = GET_FIELD_SP(insn, 20, 21);
1939 if (gen_trap_ifnofpu(dc, cpu_cond))
1940 goto jmp_insn;
1941 target = GET_FIELD_SP(insn, 0, 18);
1942 target = sign_extend(target, 19);
1943 target <<= 2;
1944 do_fbranch(dc, target, insn, cc, cpu_cond);
1945 goto jmp_insn;
1947 #else
1948 case 0x7: /* CBN+x */
1950 goto ncp_insn;
1952 #endif
1953 case 0x2: /* BN+x */
1955 target = GET_FIELD(insn, 10, 31);
1956 target = sign_extend(target, 22);
1957 target <<= 2;
1958 do_branch(dc, target, insn, 0, cpu_cond);
1959 goto jmp_insn;
1961 case 0x6: /* FBN+x */
1963 if (gen_trap_ifnofpu(dc, cpu_cond))
1964 goto jmp_insn;
1965 target = GET_FIELD(insn, 10, 31);
1966 target = sign_extend(target, 22);
1967 target <<= 2;
1968 do_fbranch(dc, target, insn, 0, cpu_cond);
1969 goto jmp_insn;
1971 case 0x4: /* SETHI */
1972 if (rd) { // nop
1973 uint32_t value = GET_FIELD(insn, 10, 31);
1974 TCGv r_const;
1976 r_const = tcg_const_tl(value << 10);
1977 gen_movl_TN_reg(rd, r_const);
1978 tcg_temp_free(r_const);
1980 break;
1981 case 0x0: /* UNIMPL */
1982 default:
1983 goto illegal_insn;
1985 break;
1987 break;
1988 case 1:
1989 /*CALL*/ {
1990 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1991 TCGv r_const;
1993 r_const = tcg_const_tl(dc->pc);
1994 gen_movl_TN_reg(15, r_const);
1995 tcg_temp_free(r_const);
1996 target += dc->pc;
1997 gen_mov_pc_npc(dc, cpu_cond);
1998 dc->npc = target;
2000 goto jmp_insn;
2001 case 2: /* FPU & Logical Operations */
2003 unsigned int xop = GET_FIELD(insn, 7, 12);
2004 if (xop == 0x3a) { /* generate trap */
2005 int cond;
2007 cpu_src1 = get_src1(insn, cpu_src1);
2008 if (IS_IMM) {
2009 rs2 = GET_FIELD(insn, 25, 31);
2010 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2011 } else {
2012 rs2 = GET_FIELD(insn, 27, 31);
2013 if (rs2 != 0) {
2014 gen_movl_reg_TN(rs2, cpu_src2);
2015 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2016 } else
2017 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2019 cond = GET_FIELD(insn, 3, 6);
2020 if (cond == 0x8) {
2021 save_state(dc, cpu_cond);
2022 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2023 } else if (cond != 0) {
2024 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2025 #ifdef TARGET_SPARC64
2026 /* V9 icc/xcc */
2027 int cc = GET_FIELD_SP(insn, 11, 12);
2029 save_state(dc, cpu_cond);
2030 if (cc == 0)
2031 gen_cond(r_cond, 0, cond);
2032 else if (cc == 2)
2033 gen_cond(r_cond, 1, cond);
2034 else
2035 goto illegal_insn;
2036 #else
2037 save_state(dc, cpu_cond);
2038 gen_cond(r_cond, 0, cond);
2039 #endif
2040 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2041 tcg_temp_free(r_cond);
2043 gen_op_next_insn();
2044 tcg_gen_exit_tb(0);
2045 dc->is_br = 1;
2046 goto jmp_insn;
2047 } else if (xop == 0x28) {
2048 rs1 = GET_FIELD(insn, 13, 17);
2049 switch(rs1) {
2050 case 0: /* rdy */
2051 #ifndef TARGET_SPARC64
2052 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2053 manual, rdy on the microSPARC
2054 II */
2055 case 0x0f: /* stbar in the SPARCv8 manual,
2056 rdy on the microSPARC II */
2057 case 0x10 ... 0x1f: /* implementation-dependent in the
2058 SPARCv8 manual, rdy on the
2059 microSPARC II */
2060 #endif
2061 gen_movl_TN_reg(rd, cpu_y);
2062 break;
2063 #ifdef TARGET_SPARC64
2064 case 0x2: /* V9 rdccr */
2065 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2066 gen_movl_TN_reg(rd, cpu_dst);
2067 break;
2068 case 0x3: /* V9 rdasi */
2069 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2070 gen_movl_TN_reg(rd, cpu_dst);
2071 break;
2072 case 0x4: /* V9 rdtick */
2074 TCGv r_tickptr;
2076 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2077 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2078 offsetof(CPUState, tick));
2079 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2080 r_tickptr);
2081 tcg_temp_free(r_tickptr);
2082 gen_movl_TN_reg(rd, cpu_dst);
2084 break;
2085 case 0x5: /* V9 rdpc */
2087 TCGv r_const;
2089 r_const = tcg_const_tl(dc->pc);
2090 gen_movl_TN_reg(rd, r_const);
2091 tcg_temp_free(r_const);
2093 break;
2094 case 0x6: /* V9 rdfprs */
2095 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2096 gen_movl_TN_reg(rd, cpu_dst);
2097 break;
2098 case 0xf: /* V9 membar */
2099 break; /* no effect */
2100 case 0x13: /* Graphics Status */
2101 if (gen_trap_ifnofpu(dc, cpu_cond))
2102 goto jmp_insn;
2103 gen_movl_TN_reg(rd, cpu_gsr);
2104 break;
2105 case 0x17: /* Tick compare */
2106 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2107 break;
2108 case 0x18: /* System tick */
2110 TCGv r_tickptr;
2112 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2113 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2114 offsetof(CPUState, stick));
2115 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2116 r_tickptr);
2117 tcg_temp_free(r_tickptr);
2118 gen_movl_TN_reg(rd, cpu_dst);
2120 break;
2121 case 0x19: /* System tick compare */
2122 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2123 break;
2124 case 0x10: /* Performance Control */
2125 case 0x11: /* Performance Instrumentation Counter */
2126 case 0x12: /* Dispatch Control */
2127 case 0x14: /* Softint set, WO */
2128 case 0x15: /* Softint clear, WO */
2129 case 0x16: /* Softint write */
2130 #endif
2131 default:
2132 goto illegal_insn;
2134 #if !defined(CONFIG_USER_ONLY)
2135 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2136 #ifndef TARGET_SPARC64
2137 if (!supervisor(dc))
2138 goto priv_insn;
2139 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2140 #else
2141 CHECK_IU_FEATURE(dc, HYPV);
2142 if (!hypervisor(dc))
2143 goto priv_insn;
2144 rs1 = GET_FIELD(insn, 13, 17);
2145 switch (rs1) {
2146 case 0: // hpstate
2147 // gen_op_rdhpstate();
2148 break;
2149 case 1: // htstate
2150 // gen_op_rdhtstate();
2151 break;
2152 case 3: // hintp
2153 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2154 break;
2155 case 5: // htba
2156 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2157 break;
2158 case 6: // hver
2159 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2160 break;
2161 case 31: // hstick_cmpr
2162 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2163 break;
2164 default:
2165 goto illegal_insn;
2167 #endif
2168 gen_movl_TN_reg(rd, cpu_dst);
2169 break;
2170 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2171 if (!supervisor(dc))
2172 goto priv_insn;
2173 #ifdef TARGET_SPARC64
2174 rs1 = GET_FIELD(insn, 13, 17);
2175 switch (rs1) {
2176 case 0: // tpc
2178 TCGv r_tsptr;
2180 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2181 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2182 offsetof(CPUState, tsptr));
2183 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2184 offsetof(trap_state, tpc));
2185 tcg_temp_free(r_tsptr);
2187 break;
2188 case 1: // tnpc
2190 TCGv r_tsptr;
2192 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2193 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2194 offsetof(CPUState, tsptr));
2195 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2196 offsetof(trap_state, tnpc));
2197 tcg_temp_free(r_tsptr);
2199 break;
2200 case 2: // tstate
2202 TCGv r_tsptr;
2204 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2205 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2206 offsetof(CPUState, tsptr));
2207 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2208 offsetof(trap_state, tstate));
2209 tcg_temp_free(r_tsptr);
2211 break;
2212 case 3: // tt
2214 TCGv r_tsptr;
2216 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2217 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2218 offsetof(CPUState, tsptr));
2219 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2220 offsetof(trap_state, tt));
2221 tcg_temp_free(r_tsptr);
2223 break;
2224 case 4: // tick
2226 TCGv r_tickptr;
2228 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2229 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2230 offsetof(CPUState, tick));
2231 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2232 r_tickptr);
2233 gen_movl_TN_reg(rd, cpu_tmp0);
2234 tcg_temp_free(r_tickptr);
2236 break;
2237 case 5: // tba
2238 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2239 break;
2240 case 6: // pstate
2241 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2242 offsetof(CPUSPARCState, pstate));
2243 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2244 break;
2245 case 7: // tl
2246 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2247 offsetof(CPUSPARCState, tl));
2248 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2249 break;
2250 case 8: // pil
2251 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2252 offsetof(CPUSPARCState, psrpil));
2253 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2254 break;
2255 case 9: // cwp
2256 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2257 break;
2258 case 10: // cansave
2259 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2260 offsetof(CPUSPARCState, cansave));
2261 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2262 break;
2263 case 11: // canrestore
2264 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2265 offsetof(CPUSPARCState, canrestore));
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2267 break;
2268 case 12: // cleanwin
2269 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2270 offsetof(CPUSPARCState, cleanwin));
2271 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2272 break;
2273 case 13: // otherwin
2274 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2275 offsetof(CPUSPARCState, otherwin));
2276 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2277 break;
2278 case 14: // wstate
2279 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2280 offsetof(CPUSPARCState, wstate));
2281 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2282 break;
2283 case 16: // UA2005 gl
2284 CHECK_IU_FEATURE(dc, GL);
2285 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2286 offsetof(CPUSPARCState, gl));
2287 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2288 break;
2289 case 26: // UA2005 strand status
2290 CHECK_IU_FEATURE(dc, HYPV);
2291 if (!hypervisor(dc))
2292 goto priv_insn;
2293 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2294 break;
2295 case 31: // ver
2296 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2297 break;
2298 case 15: // fq
2299 default:
2300 goto illegal_insn;
2302 #else
2303 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2304 #endif
2305 gen_movl_TN_reg(rd, cpu_tmp0);
2306 break;
2307 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2308 #ifdef TARGET_SPARC64
2309 save_state(dc, cpu_cond);
2310 tcg_gen_helper_0_0(helper_flushw);
2311 #else
2312 if (!supervisor(dc))
2313 goto priv_insn;
2314 gen_movl_TN_reg(rd, cpu_tbr);
2315 #endif
2316 break;
2317 #endif
2318 } else if (xop == 0x34) { /* FPU Operations */
2319 if (gen_trap_ifnofpu(dc, cpu_cond))
2320 goto jmp_insn;
2321 gen_op_clear_ieee_excp_and_FTT();
2322 rs1 = GET_FIELD(insn, 13, 17);
2323 rs2 = GET_FIELD(insn, 27, 31);
2324 xop = GET_FIELD(insn, 18, 26);
2325 switch (xop) {
2326 case 0x1: /* fmovs */
2327 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2328 break;
2329 case 0x5: /* fnegs */
2330 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2331 cpu_fpr[rs2]);
2332 break;
2333 case 0x9: /* fabss */
2334 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2335 cpu_fpr[rs2]);
2336 break;
2337 case 0x29: /* fsqrts */
2338 CHECK_FPU_FEATURE(dc, FSQRT);
2339 gen_clear_float_exceptions();
2340 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2341 cpu_fpr[rs2]);
2342 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2343 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2344 break;
2345 case 0x2a: /* fsqrtd */
2346 CHECK_FPU_FEATURE(dc, FSQRT);
2347 gen_op_load_fpr_DT1(DFPREG(rs2));
2348 gen_clear_float_exceptions();
2349 tcg_gen_helper_0_0(helper_fsqrtd);
2350 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2351 gen_op_store_DT0_fpr(DFPREG(rd));
2352 break;
2353 case 0x2b: /* fsqrtq */
2354 CHECK_FPU_FEATURE(dc, FLOAT128);
2355 gen_op_load_fpr_QT1(QFPREG(rs2));
2356 gen_clear_float_exceptions();
2357 tcg_gen_helper_0_0(helper_fsqrtq);
2358 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2359 gen_op_store_QT0_fpr(QFPREG(rd));
2360 break;
2361 case 0x41: /* fadds */
2362 gen_clear_float_exceptions();
2363 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2364 cpu_fpr[rs1], cpu_fpr[rs2]);
2365 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2366 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2367 break;
2368 case 0x42:
2369 gen_op_load_fpr_DT0(DFPREG(rs1));
2370 gen_op_load_fpr_DT1(DFPREG(rs2));
2371 gen_clear_float_exceptions();
2372 tcg_gen_helper_0_0(helper_faddd);
2373 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2374 gen_op_store_DT0_fpr(DFPREG(rd));
2375 break;
2376 case 0x43: /* faddq */
2377 CHECK_FPU_FEATURE(dc, FLOAT128);
2378 gen_op_load_fpr_QT0(QFPREG(rs1));
2379 gen_op_load_fpr_QT1(QFPREG(rs2));
2380 gen_clear_float_exceptions();
2381 tcg_gen_helper_0_0(helper_faddq);
2382 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2383 gen_op_store_QT0_fpr(QFPREG(rd));
2384 break;
2385 case 0x45: /* fsubs */
2386 gen_clear_float_exceptions();
2387 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2388 cpu_fpr[rs1], cpu_fpr[rs2]);
2389 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2390 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2391 break;
2392 case 0x46:
2393 gen_op_load_fpr_DT0(DFPREG(rs1));
2394 gen_op_load_fpr_DT1(DFPREG(rs2));
2395 gen_clear_float_exceptions();
2396 tcg_gen_helper_0_0(helper_fsubd);
2397 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2398 gen_op_store_DT0_fpr(DFPREG(rd));
2399 break;
2400 case 0x47: /* fsubq */
2401 CHECK_FPU_FEATURE(dc, FLOAT128);
2402 gen_op_load_fpr_QT0(QFPREG(rs1));
2403 gen_op_load_fpr_QT1(QFPREG(rs2));
2404 gen_clear_float_exceptions();
2405 tcg_gen_helper_0_0(helper_fsubq);
2406 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2407 gen_op_store_QT0_fpr(QFPREG(rd));
2408 break;
2409 case 0x49: /* fmuls */
2410 CHECK_FPU_FEATURE(dc, FMUL);
2411 gen_clear_float_exceptions();
2412 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2413 cpu_fpr[rs1], cpu_fpr[rs2]);
2414 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2415 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2416 break;
2417 case 0x4a: /* fmuld */
2418 CHECK_FPU_FEATURE(dc, FMUL);
2419 gen_op_load_fpr_DT0(DFPREG(rs1));
2420 gen_op_load_fpr_DT1(DFPREG(rs2));
2421 gen_clear_float_exceptions();
2422 tcg_gen_helper_0_0(helper_fmuld);
2423 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2424 gen_op_store_DT0_fpr(DFPREG(rd));
2425 break;
2426 case 0x4b: /* fmulq */
2427 CHECK_FPU_FEATURE(dc, FLOAT128);
2428 CHECK_FPU_FEATURE(dc, FMUL);
2429 gen_op_load_fpr_QT0(QFPREG(rs1));
2430 gen_op_load_fpr_QT1(QFPREG(rs2));
2431 gen_clear_float_exceptions();
2432 tcg_gen_helper_0_0(helper_fmulq);
2433 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2434 gen_op_store_QT0_fpr(QFPREG(rd));
2435 break;
2436 case 0x4d: /* fdivs */
2437 gen_clear_float_exceptions();
2438 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2439 cpu_fpr[rs1], cpu_fpr[rs2]);
2440 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2441 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2442 break;
2443 case 0x4e:
2444 gen_op_load_fpr_DT0(DFPREG(rs1));
2445 gen_op_load_fpr_DT1(DFPREG(rs2));
2446 gen_clear_float_exceptions();
2447 tcg_gen_helper_0_0(helper_fdivd);
2448 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2449 gen_op_store_DT0_fpr(DFPREG(rd));
2450 break;
2451 case 0x4f: /* fdivq */
2452 CHECK_FPU_FEATURE(dc, FLOAT128);
2453 gen_op_load_fpr_QT0(QFPREG(rs1));
2454 gen_op_load_fpr_QT1(QFPREG(rs2));
2455 gen_clear_float_exceptions();
2456 tcg_gen_helper_0_0(helper_fdivq);
2457 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2458 gen_op_store_QT0_fpr(QFPREG(rd));
2459 break;
2460 case 0x69: /* fsmuld */
2461 CHECK_FPU_FEATURE(dc, FSMULD);
2462 gen_clear_float_exceptions();
2463 tcg_gen_helper_0_2(helper_fsmuld, cpu_fpr[rs1],
2464 cpu_fpr[rs2]);
2465 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2466 gen_op_store_DT0_fpr(DFPREG(rd));
2467 break;
2468 case 0x6e: /* fdmulq */
2469 CHECK_FPU_FEATURE(dc, FLOAT128);
2470 gen_op_load_fpr_DT0(DFPREG(rs1));
2471 gen_op_load_fpr_DT1(DFPREG(rs2));
2472 gen_clear_float_exceptions();
2473 tcg_gen_helper_0_0(helper_fdmulq);
2474 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2475 gen_op_store_QT0_fpr(QFPREG(rd));
2476 break;
2477 case 0xc4: /* fitos */
2478 gen_clear_float_exceptions();
2479 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2480 cpu_fpr[rs2]);
2481 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2482 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2483 break;
2484 case 0xc6: /* fdtos */
2485 gen_op_load_fpr_DT1(DFPREG(rs2));
2486 gen_clear_float_exceptions();
2487 tcg_gen_helper_1_0(helper_fdtos, cpu_tmp32);
2488 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2489 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2490 break;
2491 case 0xc7: /* fqtos */
2492 CHECK_FPU_FEATURE(dc, FLOAT128);
2493 gen_op_load_fpr_QT1(QFPREG(rs2));
2494 gen_clear_float_exceptions();
2495 tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
2496 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2497 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2498 break;
2499 case 0xc8: /* fitod */
2500 tcg_gen_helper_0_1(helper_fitod, cpu_fpr[rs2]);
2501 gen_op_store_DT0_fpr(DFPREG(rd));
2502 break;
2503 case 0xc9: /* fstod */
2504 tcg_gen_helper_0_1(helper_fstod, cpu_fpr[rs2]);
2505 gen_op_store_DT0_fpr(DFPREG(rd));
2506 break;
2507 case 0xcb: /* fqtod */
2508 CHECK_FPU_FEATURE(dc, FLOAT128);
2509 gen_op_load_fpr_QT1(QFPREG(rs2));
2510 gen_clear_float_exceptions();
2511 tcg_gen_helper_0_0(helper_fqtod);
2512 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2513 gen_op_store_DT0_fpr(DFPREG(rd));
2514 break;
2515 case 0xcc: /* fitoq */
2516 CHECK_FPU_FEATURE(dc, FLOAT128);
2517 tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
2518 gen_op_store_QT0_fpr(QFPREG(rd));
2519 break;
2520 case 0xcd: /* fstoq */
2521 CHECK_FPU_FEATURE(dc, FLOAT128);
2522 tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
2523 gen_op_store_QT0_fpr(QFPREG(rd));
2524 break;
2525 case 0xce: /* fdtoq */
2526 CHECK_FPU_FEATURE(dc, FLOAT128);
2527 gen_op_load_fpr_DT1(DFPREG(rs2));
2528 tcg_gen_helper_0_0(helper_fdtoq);
2529 gen_op_store_QT0_fpr(QFPREG(rd));
2530 break;
2531 case 0xd1: /* fstoi */
2532 gen_clear_float_exceptions();
2533 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2534 cpu_fpr[rs2]);
2535 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2536 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2537 break;
2538 case 0xd2: /* fdtoi */
2539 gen_op_load_fpr_DT1(DFPREG(rs2));
2540 gen_clear_float_exceptions();
2541 tcg_gen_helper_1_0(helper_fdtoi, cpu_tmp32);
2542 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2543 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2544 break;
2545 case 0xd3: /* fqtoi */
2546 CHECK_FPU_FEATURE(dc, FLOAT128);
2547 gen_op_load_fpr_QT1(QFPREG(rs2));
2548 gen_clear_float_exceptions();
2549 tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
2550 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2551 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2552 break;
2553 #ifdef TARGET_SPARC64
2554 case 0x2: /* V9 fmovd */
2555 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2556 cpu_fpr[DFPREG(rs2)]);
2557 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2558 cpu_fpr[DFPREG(rs2) + 1]);
2559 break;
2560 case 0x3: /* V9 fmovq */
2561 CHECK_FPU_FEATURE(dc, FLOAT128);
2562 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2563 cpu_fpr[QFPREG(rs2)]);
2564 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2565 cpu_fpr[QFPREG(rs2) + 1]);
2566 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2567 cpu_fpr[QFPREG(rs2) + 2]);
2568 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2569 cpu_fpr[QFPREG(rs2) + 3]);
2570 break;
2571 case 0x6: /* V9 fnegd */
2572 gen_op_load_fpr_DT1(DFPREG(rs2));
2573 tcg_gen_helper_0_0(helper_fnegd);
2574 gen_op_store_DT0_fpr(DFPREG(rd));
2575 break;
2576 case 0x7: /* V9 fnegq */
2577 CHECK_FPU_FEATURE(dc, FLOAT128);
2578 gen_op_load_fpr_QT1(QFPREG(rs2));
2579 tcg_gen_helper_0_0(helper_fnegq);
2580 gen_op_store_QT0_fpr(QFPREG(rd));
2581 break;
2582 case 0xa: /* V9 fabsd */
2583 gen_op_load_fpr_DT1(DFPREG(rs2));
2584 tcg_gen_helper_0_0(helper_fabsd);
2585 gen_op_store_DT0_fpr(DFPREG(rd));
2586 break;
2587 case 0xb: /* V9 fabsq */
2588 CHECK_FPU_FEATURE(dc, FLOAT128);
2589 gen_op_load_fpr_QT1(QFPREG(rs2));
2590 tcg_gen_helper_0_0(helper_fabsq);
2591 gen_op_store_QT0_fpr(QFPREG(rd));
2592 break;
2593 case 0x81: /* V9 fstox */
2594 gen_clear_float_exceptions();
2595 tcg_gen_helper_0_1(helper_fstox, cpu_fpr[rs2]);
2596 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2597 gen_op_store_DT0_fpr(DFPREG(rd));
2598 break;
2599 case 0x82: /* V9 fdtox */
2600 gen_op_load_fpr_DT1(DFPREG(rs2));
2601 gen_clear_float_exceptions();
2602 tcg_gen_helper_0_0(helper_fdtox);
2603 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2604 gen_op_store_DT0_fpr(DFPREG(rd));
2605 break;
2606 case 0x83: /* V9 fqtox */
2607 CHECK_FPU_FEATURE(dc, FLOAT128);
2608 gen_op_load_fpr_QT1(QFPREG(rs2));
2609 gen_clear_float_exceptions();
2610 tcg_gen_helper_0_0(helper_fqtox);
2611 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2612 gen_op_store_DT0_fpr(DFPREG(rd));
2613 break;
2614 case 0x84: /* V9 fxtos */
2615 gen_op_load_fpr_DT1(DFPREG(rs2));
2616 gen_clear_float_exceptions();
2617 tcg_gen_helper_1_0(helper_fxtos, cpu_tmp32);
2618 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2619 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2620 break;
2621 case 0x88: /* V9 fxtod */
2622 gen_op_load_fpr_DT1(DFPREG(rs2));
2623 gen_clear_float_exceptions();
2624 tcg_gen_helper_0_0(helper_fxtod);
2625 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2626 gen_op_store_DT0_fpr(DFPREG(rd));
2627 break;
2628 case 0x8c: /* V9 fxtoq */
2629 CHECK_FPU_FEATURE(dc, FLOAT128);
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fxtoq);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2634 gen_op_store_QT0_fpr(QFPREG(rd));
2635 break;
2636 #endif
2637 default:
2638 goto illegal_insn;
2640 } else if (xop == 0x35) { /* FPU Operations */
2641 #ifdef TARGET_SPARC64
2642 int cond;
2643 #endif
2644 if (gen_trap_ifnofpu(dc, cpu_cond))
2645 goto jmp_insn;
2646 gen_op_clear_ieee_excp_and_FTT();
2647 rs1 = GET_FIELD(insn, 13, 17);
2648 rs2 = GET_FIELD(insn, 27, 31);
2649 xop = GET_FIELD(insn, 18, 26);
2650 #ifdef TARGET_SPARC64
2651 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2652 int l1;
2654 l1 = gen_new_label();
2655 cond = GET_FIELD_SP(insn, 14, 17);
2656 cpu_src1 = get_src1(insn, cpu_src1);
2657 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2658 0, l1);
2659 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2660 gen_set_label(l1);
2661 break;
2662 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2663 int l1;
2665 l1 = gen_new_label();
2666 cond = GET_FIELD_SP(insn, 14, 17);
2667 cpu_src1 = get_src1(insn, cpu_src1);
2668 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2669 0, l1);
2670 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2671 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2672 gen_set_label(l1);
2673 break;
2674 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2675 int l1;
2677 CHECK_FPU_FEATURE(dc, FLOAT128);
2678 l1 = gen_new_label();
2679 cond = GET_FIELD_SP(insn, 14, 17);
2680 cpu_src1 = get_src1(insn, cpu_src1);
2681 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2682 0, l1);
2683 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2684 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2685 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2686 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2687 gen_set_label(l1);
2688 break;
2690 #endif
2691 switch (xop) {
2692 #ifdef TARGET_SPARC64
2693 #define FMOVSCC(fcc) \
2695 TCGv r_cond; \
2696 int l1; \
2698 l1 = gen_new_label(); \
2699 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2700 cond = GET_FIELD_SP(insn, 14, 17); \
2701 gen_fcond(r_cond, fcc, cond); \
2702 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2703 0, l1); \
2704 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2705 gen_set_label(l1); \
2706 tcg_temp_free(r_cond); \
2708 #define FMOVDCC(fcc) \
2710 TCGv r_cond; \
2711 int l1; \
2713 l1 = gen_new_label(); \
2714 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2715 cond = GET_FIELD_SP(insn, 14, 17); \
2716 gen_fcond(r_cond, fcc, cond); \
2717 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2718 0, l1); \
2719 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2720 cpu_fpr[DFPREG(rs2)]); \
2721 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2722 cpu_fpr[DFPREG(rs2) + 1]); \
2723 gen_set_label(l1); \
2724 tcg_temp_free(r_cond); \
2726 #define FMOVQCC(fcc) \
2728 TCGv r_cond; \
2729 int l1; \
2731 l1 = gen_new_label(); \
2732 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2733 cond = GET_FIELD_SP(insn, 14, 17); \
2734 gen_fcond(r_cond, fcc, cond); \
2735 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2736 0, l1); \
2737 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2738 cpu_fpr[QFPREG(rs2)]); \
2739 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2740 cpu_fpr[QFPREG(rs2) + 1]); \
2741 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2742 cpu_fpr[QFPREG(rs2) + 2]); \
2743 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2744 cpu_fpr[QFPREG(rs2) + 3]); \
2745 gen_set_label(l1); \
2746 tcg_temp_free(r_cond); \
2748 case 0x001: /* V9 fmovscc %fcc0 */
2749 FMOVSCC(0);
2750 break;
2751 case 0x002: /* V9 fmovdcc %fcc0 */
2752 FMOVDCC(0);
2753 break;
2754 case 0x003: /* V9 fmovqcc %fcc0 */
2755 CHECK_FPU_FEATURE(dc, FLOAT128);
2756 FMOVQCC(0);
2757 break;
2758 case 0x041: /* V9 fmovscc %fcc1 */
2759 FMOVSCC(1);
2760 break;
2761 case 0x042: /* V9 fmovdcc %fcc1 */
2762 FMOVDCC(1);
2763 break;
2764 case 0x043: /* V9 fmovqcc %fcc1 */
2765 CHECK_FPU_FEATURE(dc, FLOAT128);
2766 FMOVQCC(1);
2767 break;
2768 case 0x081: /* V9 fmovscc %fcc2 */
2769 FMOVSCC(2);
2770 break;
2771 case 0x082: /* V9 fmovdcc %fcc2 */
2772 FMOVDCC(2);
2773 break;
2774 case 0x083: /* V9 fmovqcc %fcc2 */
2775 CHECK_FPU_FEATURE(dc, FLOAT128);
2776 FMOVQCC(2);
2777 break;
2778 case 0x0c1: /* V9 fmovscc %fcc3 */
2779 FMOVSCC(3);
2780 break;
2781 case 0x0c2: /* V9 fmovdcc %fcc3 */
2782 FMOVDCC(3);
2783 break;
2784 case 0x0c3: /* V9 fmovqcc %fcc3 */
2785 CHECK_FPU_FEATURE(dc, FLOAT128);
2786 FMOVQCC(3);
2787 break;
2788 #undef FMOVSCC
2789 #undef FMOVDCC
2790 #undef FMOVQCC
2791 #define FMOVCC(size_FDQ, icc) \
2793 TCGv r_cond; \
2794 int l1; \
2796 l1 = gen_new_label(); \
2797 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2798 cond = GET_FIELD_SP(insn, 14, 17); \
2799 gen_cond(r_cond, icc, cond); \
2800 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2801 0, l1); \
2802 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2803 (glue(size_FDQ, FPREG(rs2))); \
2804 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2805 (glue(size_FDQ, FPREG(rd))); \
2806 gen_set_label(l1); \
2807 tcg_temp_free(r_cond); \
2809 #define FMOVSCC(icc) \
2811 TCGv r_cond; \
2812 int l1; \
2814 l1 = gen_new_label(); \
2815 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2816 cond = GET_FIELD_SP(insn, 14, 17); \
2817 gen_cond(r_cond, icc, cond); \
2818 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2819 0, l1); \
2820 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2821 gen_set_label(l1); \
2822 tcg_temp_free(r_cond); \
2824 #define FMOVDCC(icc) \
2826 TCGv r_cond; \
2827 int l1; \
2829 l1 = gen_new_label(); \
2830 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2831 cond = GET_FIELD_SP(insn, 14, 17); \
2832 gen_cond(r_cond, icc, cond); \
2833 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2834 0, l1); \
2835 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2836 cpu_fpr[DFPREG(rs2)]); \
2837 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2838 cpu_fpr[DFPREG(rs2) + 1]); \
2839 gen_set_label(l1); \
2840 tcg_temp_free(r_cond); \
2842 #define FMOVQCC(icc) \
2844 TCGv r_cond; \
2845 int l1; \
2847 l1 = gen_new_label(); \
2848 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2849 cond = GET_FIELD_SP(insn, 14, 17); \
2850 gen_cond(r_cond, icc, cond); \
2851 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2852 0, l1); \
2853 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2854 cpu_fpr[QFPREG(rs2)]); \
2855 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2856 cpu_fpr[QFPREG(rs2) + 1]); \
2857 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2858 cpu_fpr[QFPREG(rs2) + 2]); \
2859 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2860 cpu_fpr[QFPREG(rs2) + 3]); \
2861 gen_set_label(l1); \
2862 tcg_temp_free(r_cond); \
2865 case 0x101: /* V9 fmovscc %icc */
2866 FMOVSCC(0);
2867 break;
2868 case 0x102: /* V9 fmovdcc %icc */
2869 FMOVDCC(0);
2870 case 0x103: /* V9 fmovqcc %icc */
2871 CHECK_FPU_FEATURE(dc, FLOAT128);
2872 FMOVQCC(0);
2873 break;
2874 case 0x181: /* V9 fmovscc %xcc */
2875 FMOVSCC(1);
2876 break;
2877 case 0x182: /* V9 fmovdcc %xcc */
2878 FMOVDCC(1);
2879 break;
2880 case 0x183: /* V9 fmovqcc %xcc */
2881 CHECK_FPU_FEATURE(dc, FLOAT128);
2882 FMOVQCC(1);
2883 break;
2884 #undef FMOVSCC
2885 #undef FMOVDCC
2886 #undef FMOVQCC
2887 #endif
2888 case 0x51: /* fcmps, V9 %fcc */
2889 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2890 break;
2891 case 0x52: /* fcmpd, V9 %fcc */
2892 gen_op_load_fpr_DT0(DFPREG(rs1));
2893 gen_op_load_fpr_DT1(DFPREG(rs2));
2894 gen_op_fcmpd(rd & 3);
2895 break;
2896 case 0x53: /* fcmpq, V9 %fcc */
2897 CHECK_FPU_FEATURE(dc, FLOAT128);
2898 gen_op_load_fpr_QT0(QFPREG(rs1));
2899 gen_op_load_fpr_QT1(QFPREG(rs2));
2900 gen_op_fcmpq(rd & 3);
2901 break;
2902 case 0x55: /* fcmpes, V9 %fcc */
2903 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2904 break;
2905 case 0x56: /* fcmped, V9 %fcc */
2906 gen_op_load_fpr_DT0(DFPREG(rs1));
2907 gen_op_load_fpr_DT1(DFPREG(rs2));
2908 gen_op_fcmped(rd & 3);
2909 break;
2910 case 0x57: /* fcmpeq, V9 %fcc */
2911 CHECK_FPU_FEATURE(dc, FLOAT128);
2912 gen_op_load_fpr_QT0(QFPREG(rs1));
2913 gen_op_load_fpr_QT1(QFPREG(rs2));
2914 gen_op_fcmpeq(rd & 3);
2915 break;
2916 default:
2917 goto illegal_insn;
2919 } else if (xop == 0x2) {
2920 // clr/mov shortcut
2922 rs1 = GET_FIELD(insn, 13, 17);
2923 if (rs1 == 0) {
2924 // or %g0, x, y -> mov T0, x; mov y, T0
2925 if (IS_IMM) { /* immediate */
2926 TCGv r_const;
2928 rs2 = GET_FIELDs(insn, 19, 31);
2929 r_const = tcg_const_tl((int)rs2);
2930 gen_movl_TN_reg(rd, r_const);
2931 tcg_temp_free(r_const);
2932 } else { /* register */
2933 rs2 = GET_FIELD(insn, 27, 31);
2934 gen_movl_reg_TN(rs2, cpu_dst);
2935 gen_movl_TN_reg(rd, cpu_dst);
2937 } else {
2938 cpu_src1 = get_src1(insn, cpu_src1);
2939 if (IS_IMM) { /* immediate */
2940 rs2 = GET_FIELDs(insn, 19, 31);
2941 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2942 gen_movl_TN_reg(rd, cpu_dst);
2943 } else { /* register */
2944 // or x, %g0, y -> mov T1, x; mov y, T1
2945 rs2 = GET_FIELD(insn, 27, 31);
2946 if (rs2 != 0) {
2947 gen_movl_reg_TN(rs2, cpu_src2);
2948 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2949 gen_movl_TN_reg(rd, cpu_dst);
2950 } else
2951 gen_movl_TN_reg(rd, cpu_src1);
2954 #ifdef TARGET_SPARC64
2955 } else if (xop == 0x25) { /* sll, V9 sllx */
2956 cpu_src1 = get_src1(insn, cpu_src1);
2957 if (IS_IMM) { /* immediate */
2958 rs2 = GET_FIELDs(insn, 20, 31);
2959 if (insn & (1 << 12)) {
2960 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2961 } else {
2962 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2964 } else { /* register */
2965 rs2 = GET_FIELD(insn, 27, 31);
2966 gen_movl_reg_TN(rs2, cpu_src2);
2967 if (insn & (1 << 12)) {
2968 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2969 } else {
2970 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2972 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2974 gen_movl_TN_reg(rd, cpu_dst);
2975 } else if (xop == 0x26) { /* srl, V9 srlx */
2976 cpu_src1 = get_src1(insn, cpu_src1);
2977 if (IS_IMM) { /* immediate */
2978 rs2 = GET_FIELDs(insn, 20, 31);
2979 if (insn & (1 << 12)) {
2980 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2981 } else {
2982 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2983 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2985 } else { /* register */
2986 rs2 = GET_FIELD(insn, 27, 31);
2987 gen_movl_reg_TN(rs2, cpu_src2);
2988 if (insn & (1 << 12)) {
2989 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2990 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2991 } else {
2992 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2993 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2994 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2997 gen_movl_TN_reg(rd, cpu_dst);
2998 } else if (xop == 0x27) { /* sra, V9 srax */
2999 cpu_src1 = get_src1(insn, cpu_src1);
3000 if (IS_IMM) { /* immediate */
3001 rs2 = GET_FIELDs(insn, 20, 31);
3002 if (insn & (1 << 12)) {
3003 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3004 } else {
3005 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3006 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3007 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3009 } else { /* register */
3010 rs2 = GET_FIELD(insn, 27, 31);
3011 gen_movl_reg_TN(rs2, cpu_src2);
3012 if (insn & (1 << 12)) {
3013 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3014 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3015 } else {
3016 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3017 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3018 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3019 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3022 gen_movl_TN_reg(rd, cpu_dst);
3023 #endif
3024 } else if (xop < 0x36) {
3025 cpu_src1 = get_src1(insn, cpu_src1);
3026 cpu_src2 = get_src2(insn, cpu_src2);
3027 if (xop < 0x20) {
3028 switch (xop & ~0x10) {
3029 case 0x0:
3030 if (xop & 0x10)
3031 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3032 else
3033 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3034 break;
3035 case 0x1:
3036 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3037 if (xop & 0x10)
3038 gen_op_logic_cc(cpu_dst);
3039 break;
3040 case 0x2:
3041 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3042 if (xop & 0x10)
3043 gen_op_logic_cc(cpu_dst);
3044 break;
3045 case 0x3:
3046 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3047 if (xop & 0x10)
3048 gen_op_logic_cc(cpu_dst);
3049 break;
3050 case 0x4:
3051 if (xop & 0x10)
3052 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3053 else
3054 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3055 break;
3056 case 0x5:
3057 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3058 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3059 if (xop & 0x10)
3060 gen_op_logic_cc(cpu_dst);
3061 break;
3062 case 0x6:
3063 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3064 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3065 if (xop & 0x10)
3066 gen_op_logic_cc(cpu_dst);
3067 break;
3068 case 0x7:
3069 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3070 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3071 if (xop & 0x10)
3072 gen_op_logic_cc(cpu_dst);
3073 break;
3074 case 0x8:
3075 if (xop & 0x10)
3076 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3077 else {
3078 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3079 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3080 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3082 break;
3083 #ifdef TARGET_SPARC64
3084 case 0x9: /* V9 mulx */
3085 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3086 break;
3087 #endif
3088 case 0xa:
3089 CHECK_IU_FEATURE(dc, MUL);
3090 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3091 if (xop & 0x10)
3092 gen_op_logic_cc(cpu_dst);
3093 break;
3094 case 0xb:
3095 CHECK_IU_FEATURE(dc, MUL);
3096 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3097 if (xop & 0x10)
3098 gen_op_logic_cc(cpu_dst);
3099 break;
3100 case 0xc:
3101 if (xop & 0x10)
3102 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3103 else {
3104 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3105 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3106 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3108 break;
3109 #ifdef TARGET_SPARC64
3110 case 0xd: /* V9 udivx */
3111 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3112 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3113 gen_trap_ifdivzero_tl(cpu_cc_src2);
3114 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3115 break;
3116 #endif
3117 case 0xe:
3118 CHECK_IU_FEATURE(dc, DIV);
3119 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3120 cpu_src2);
3121 if (xop & 0x10)
3122 gen_op_div_cc(cpu_dst);
3123 break;
3124 case 0xf:
3125 CHECK_IU_FEATURE(dc, DIV);
3126 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3127 cpu_src2);
3128 if (xop & 0x10)
3129 gen_op_div_cc(cpu_dst);
3130 break;
3131 default:
3132 goto illegal_insn;
3134 gen_movl_TN_reg(rd, cpu_dst);
3135 } else {
3136 switch (xop) {
3137 case 0x20: /* taddcc */
3138 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3139 gen_movl_TN_reg(rd, cpu_dst);
3140 break;
3141 case 0x21: /* tsubcc */
3142 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3143 gen_movl_TN_reg(rd, cpu_dst);
3144 break;
3145 case 0x22: /* taddcctv */
3146 save_state(dc, cpu_cond);
3147 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3148 gen_movl_TN_reg(rd, cpu_dst);
3149 break;
3150 case 0x23: /* tsubcctv */
3151 save_state(dc, cpu_cond);
3152 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3153 gen_movl_TN_reg(rd, cpu_dst);
3154 break;
3155 case 0x24: /* mulscc */
3156 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3157 gen_movl_TN_reg(rd, cpu_dst);
3158 break;
3159 #ifndef TARGET_SPARC64
3160 case 0x25: /* sll */
3161 if (IS_IMM) { /* immediate */
3162 rs2 = GET_FIELDs(insn, 20, 31);
3163 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3164 } else { /* register */
3165 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3166 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3168 gen_movl_TN_reg(rd, cpu_dst);
3169 break;
3170 case 0x26: /* srl */
3171 if (IS_IMM) { /* immediate */
3172 rs2 = GET_FIELDs(insn, 20, 31);
3173 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3174 } else { /* register */
3175 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3176 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3178 gen_movl_TN_reg(rd, cpu_dst);
3179 break;
3180 case 0x27: /* sra */
3181 if (IS_IMM) { /* immediate */
3182 rs2 = GET_FIELDs(insn, 20, 31);
3183 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3184 } else { /* register */
3185 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3186 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3188 gen_movl_TN_reg(rd, cpu_dst);
3189 break;
3190 #endif
3191 case 0x30:
3193 switch(rd) {
3194 case 0: /* wry */
3195 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3196 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3197 break;
3198 #ifndef TARGET_SPARC64
3199 case 0x01 ... 0x0f: /* undefined in the
3200 SPARCv8 manual, nop
3201 on the microSPARC
3202 II */
3203 case 0x10 ... 0x1f: /* implementation-dependent
3204 in the SPARCv8
3205 manual, nop on the
3206 microSPARC II */
3207 break;
3208 #else
3209 case 0x2: /* V9 wrccr */
3210 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3211 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3212 break;
3213 case 0x3: /* V9 wrasi */
3214 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3215 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3216 break;
3217 case 0x6: /* V9 wrfprs */
3218 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3219 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3220 save_state(dc, cpu_cond);
3221 gen_op_next_insn();
3222 tcg_gen_exit_tb(0);
3223 dc->is_br = 1;
3224 break;
3225 case 0xf: /* V9 sir, nop if user */
3226 #if !defined(CONFIG_USER_ONLY)
3227 if (supervisor(dc))
3228 ; // XXX
3229 #endif
3230 break;
3231 case 0x13: /* Graphics Status */
3232 if (gen_trap_ifnofpu(dc, cpu_cond))
3233 goto jmp_insn;
3234 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3235 break;
3236 case 0x17: /* Tick compare */
3237 #if !defined(CONFIG_USER_ONLY)
3238 if (!supervisor(dc))
3239 goto illegal_insn;
3240 #endif
3242 TCGv r_tickptr;
3244 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3245 cpu_src2);
3246 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3247 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3248 offsetof(CPUState, tick));
3249 tcg_gen_helper_0_2(helper_tick_set_limit,
3250 r_tickptr, cpu_tick_cmpr);
3251 tcg_temp_free(r_tickptr);
3253 break;
3254 case 0x18: /* System tick */
3255 #if !defined(CONFIG_USER_ONLY)
3256 if (!supervisor(dc))
3257 goto illegal_insn;
3258 #endif
3260 TCGv r_tickptr;
3262 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3263 cpu_src2);
3264 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3265 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3266 offsetof(CPUState, stick));
3267 tcg_gen_helper_0_2(helper_tick_set_count,
3268 r_tickptr, cpu_dst);
3269 tcg_temp_free(r_tickptr);
3271 break;
3272 case 0x19: /* System tick compare */
3273 #if !defined(CONFIG_USER_ONLY)
3274 if (!supervisor(dc))
3275 goto illegal_insn;
3276 #endif
3278 TCGv r_tickptr;
3280 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3281 cpu_src2);
3282 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3283 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3284 offsetof(CPUState, stick));
3285 tcg_gen_helper_0_2(helper_tick_set_limit,
3286 r_tickptr, cpu_stick_cmpr);
3287 tcg_temp_free(r_tickptr);
3289 break;
3291 case 0x10: /* Performance Control */
3292 case 0x11: /* Performance Instrumentation
3293 Counter */
3294 case 0x12: /* Dispatch Control */
3295 case 0x14: /* Softint set */
3296 case 0x15: /* Softint clear */
3297 case 0x16: /* Softint write */
3298 #endif
3299 default:
3300 goto illegal_insn;
3303 break;
3304 #if !defined(CONFIG_USER_ONLY)
3305 case 0x31: /* wrpsr, V9 saved, restored */
3307 if (!supervisor(dc))
3308 goto priv_insn;
3309 #ifdef TARGET_SPARC64
3310 switch (rd) {
3311 case 0:
3312 tcg_gen_helper_0_0(helper_saved);
3313 break;
3314 case 1:
3315 tcg_gen_helper_0_0(helper_restored);
3316 break;
3317 case 2: /* UA2005 allclean */
3318 case 3: /* UA2005 otherw */
3319 case 4: /* UA2005 normalw */
3320 case 5: /* UA2005 invalw */
3321 // XXX
3322 default:
3323 goto illegal_insn;
3325 #else
3326 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3327 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3328 save_state(dc, cpu_cond);
3329 gen_op_next_insn();
3330 tcg_gen_exit_tb(0);
3331 dc->is_br = 1;
3332 #endif
3334 break;
3335 case 0x32: /* wrwim, V9 wrpr */
3337 if (!supervisor(dc))
3338 goto priv_insn;
3339 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3340 #ifdef TARGET_SPARC64
3341 switch (rd) {
3342 case 0: // tpc
3344 TCGv r_tsptr;
3346 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3347 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3348 offsetof(CPUState, tsptr));
3349 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3350 offsetof(trap_state, tpc));
3351 tcg_temp_free(r_tsptr);
3353 break;
3354 case 1: // tnpc
3356 TCGv r_tsptr;
3358 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3359 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3360 offsetof(CPUState, tsptr));
3361 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3362 offsetof(trap_state, tnpc));
3363 tcg_temp_free(r_tsptr);
3365 break;
3366 case 2: // tstate
3368 TCGv r_tsptr;
3370 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3371 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3372 offsetof(CPUState, tsptr));
3373 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3374 offsetof(trap_state,
3375 tstate));
3376 tcg_temp_free(r_tsptr);
3378 break;
3379 case 3: // tt
3381 TCGv r_tsptr;
3383 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3384 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3385 offsetof(CPUState, tsptr));
3386 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3387 offsetof(trap_state, tt));
3388 tcg_temp_free(r_tsptr);
3390 break;
3391 case 4: // tick
3393 TCGv r_tickptr;
3395 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3396 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3397 offsetof(CPUState, tick));
3398 tcg_gen_helper_0_2(helper_tick_set_count,
3399 r_tickptr, cpu_tmp0);
3400 tcg_temp_free(r_tickptr);
3402 break;
3403 case 5: // tba
3404 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3405 break;
3406 case 6: // pstate
3407 save_state(dc, cpu_cond);
3408 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3409 gen_op_next_insn();
3410 tcg_gen_exit_tb(0);
3411 dc->is_br = 1;
3412 break;
3413 case 7: // tl
3414 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3415 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3416 offsetof(CPUSPARCState, tl));
3417 break;
3418 case 8: // pil
3419 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3420 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3421 offsetof(CPUSPARCState,
3422 psrpil));
3423 break;
3424 case 9: // cwp
3425 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3426 break;
3427 case 10: // cansave
3428 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3429 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3430 offsetof(CPUSPARCState,
3431 cansave));
3432 break;
3433 case 11: // canrestore
3434 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3435 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3436 offsetof(CPUSPARCState,
3437 canrestore));
3438 break;
3439 case 12: // cleanwin
3440 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3441 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3442 offsetof(CPUSPARCState,
3443 cleanwin));
3444 break;
3445 case 13: // otherwin
3446 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3447 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3448 offsetof(CPUSPARCState,
3449 otherwin));
3450 break;
3451 case 14: // wstate
3452 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3453 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3454 offsetof(CPUSPARCState,
3455 wstate));
3456 break;
3457 case 16: // UA2005 gl
3458 CHECK_IU_FEATURE(dc, GL);
3459 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3460 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3461 offsetof(CPUSPARCState, gl));
3462 break;
3463 case 26: // UA2005 strand status
3464 CHECK_IU_FEATURE(dc, HYPV);
3465 if (!hypervisor(dc))
3466 goto priv_insn;
3467 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3468 break;
3469 default:
3470 goto illegal_insn;
3472 #else
3473 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3474 if (dc->def->nwindows != 32)
3475 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3476 (1 << dc->def->nwindows) - 1);
3477 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3478 #endif
3480 break;
            case 0x33: /* wrtbr, UA2005 wrhpr */
#ifndef TARGET_SPARC64
                /* sparc32: WRTBR writes rs1 ^ rs2 into the trap base
                   register; privileged. */
                if (!supervisor(dc))
                    goto priv_insn;
                tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                /* sparc64 (UA2005): same opcode is WRHPR; requires
                   hypervisor privilege, rd selects the hyperprivileged
                   register. */
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                switch (rd) {
                case 0: // hpstate
                    /* Not implemented yet; end the TB so a change of
                       hypervisor state takes effect on the next insn. */
                    // XXX gen_op_wrhpstate();
                    save_state(dc, cpu_cond);
                    gen_op_next_insn();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    break;
                case 1: // htstate
                    // XXX gen_op_wrhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                    break;
                case 31: // hstick_cmpr
                    TCGv r_tickptr;

                    /* Update the compare register and reprogram the
                       hstick timer via its helper. */
                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                    r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                   offsetof(CPUState, hstick));
                    tcg_gen_helper_0_2(helper_tick_set_limit,
                                       r_tickptr, cpu_hstick_cmpr);
                    tcg_temp_free(r_tickptr);
                    break;
                case 6: // hver readonly
                default:
                    goto illegal_insn;
#endif
                break;
#endif
3530 #ifdef TARGET_SPARC64
3531 case 0x2c: /* V9 movcc */
3533 int cc = GET_FIELD_SP(insn, 11, 12);
3534 int cond = GET_FIELD_SP(insn, 14, 17);
3535 TCGv r_cond;
3536 int l1;
3538 r_cond = tcg_temp_new(TCG_TYPE_TL);
3539 if (insn & (1 << 18)) {
3540 if (cc == 0)
3541 gen_cond(r_cond, 0, cond);
3542 else if (cc == 2)
3543 gen_cond(r_cond, 1, cond);
3544 else
3545 goto illegal_insn;
3546 } else {
3547 gen_fcond(r_cond, cc, cond);
3550 l1 = gen_new_label();
3552 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3553 if (IS_IMM) { /* immediate */
3554 TCGv r_const;
3556 rs2 = GET_FIELD_SPs(insn, 0, 10);
3557 r_const = tcg_const_tl((int)rs2);
3558 gen_movl_TN_reg(rd, r_const);
3559 tcg_temp_free(r_const);
3560 } else {
3561 rs2 = GET_FIELD_SP(insn, 0, 4);
3562 gen_movl_reg_TN(rs2, cpu_tmp0);
3563 gen_movl_TN_reg(rd, cpu_tmp0);
3565 gen_set_label(l1);
3566 tcg_temp_free(r_cond);
3567 break;
3569 case 0x2d: /* V9 sdivx */
3570 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3571 gen_movl_TN_reg(rd, cpu_dst);
3572 break;
3573 case 0x2e: /* V9 popc */
3575 cpu_src2 = get_src2(insn, cpu_src2);
3576 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3577 cpu_src2);
3578 gen_movl_TN_reg(rd, cpu_dst);
3580 case 0x2f: /* V9 movr */
3582 int cond = GET_FIELD_SP(insn, 10, 12);
3583 int l1;
3585 cpu_src1 = get_src1(insn, cpu_src1);
3587 l1 = gen_new_label();
3589 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3590 cpu_src1, 0, l1);
3591 if (IS_IMM) { /* immediate */
3592 TCGv r_const;
3594 rs2 = GET_FIELD_SPs(insn, 0, 9);
3595 r_const = tcg_const_tl((int)rs2);
3596 gen_movl_TN_reg(rd, r_const);
3597 tcg_temp_free(r_const);
3598 } else {
3599 rs2 = GET_FIELD_SP(insn, 0, 4);
3600 gen_movl_reg_TN(rs2, cpu_tmp0);
3601 gen_movl_TN_reg(rd, cpu_tmp0);
3603 gen_set_label(l1);
3604 break;
3606 #endif
3607 default:
3608 goto illegal_insn;
3611 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3612 #ifdef TARGET_SPARC64
3613 int opf = GET_FIELD_SP(insn, 5, 13);
3614 rs1 = GET_FIELD(insn, 13, 17);
3615 rs2 = GET_FIELD(insn, 27, 31);
3616 if (gen_trap_ifnofpu(dc, cpu_cond))
3617 goto jmp_insn;
3619 switch (opf) {
3620 case 0x000: /* VIS I edge8cc */
3621 case 0x001: /* VIS II edge8n */
3622 case 0x002: /* VIS I edge8lcc */
3623 case 0x003: /* VIS II edge8ln */
3624 case 0x004: /* VIS I edge16cc */
3625 case 0x005: /* VIS II edge16n */
3626 case 0x006: /* VIS I edge16lcc */
3627 case 0x007: /* VIS II edge16ln */
3628 case 0x008: /* VIS I edge32cc */
3629 case 0x009: /* VIS II edge32n */
3630 case 0x00a: /* VIS I edge32lcc */
3631 case 0x00b: /* VIS II edge32ln */
3632 // XXX
3633 goto illegal_insn;
3634 case 0x010: /* VIS I array8 */
3635 CHECK_FPU_FEATURE(dc, VIS1);
3636 cpu_src1 = get_src1(insn, cpu_src1);
3637 gen_movl_reg_TN(rs2, cpu_src2);
3638 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3639 cpu_src2);
3640 gen_movl_TN_reg(rd, cpu_dst);
3641 break;
3642 case 0x012: /* VIS I array16 */
3643 CHECK_FPU_FEATURE(dc, VIS1);
3644 cpu_src1 = get_src1(insn, cpu_src1);
3645 gen_movl_reg_TN(rs2, cpu_src2);
3646 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3647 cpu_src2);
3648 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3649 gen_movl_TN_reg(rd, cpu_dst);
3650 break;
3651 case 0x014: /* VIS I array32 */
3652 CHECK_FPU_FEATURE(dc, VIS1);
3653 cpu_src1 = get_src1(insn, cpu_src1);
3654 gen_movl_reg_TN(rs2, cpu_src2);
3655 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3656 cpu_src2);
3657 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3658 gen_movl_TN_reg(rd, cpu_dst);
3659 break;
3660 case 0x018: /* VIS I alignaddr */
3661 CHECK_FPU_FEATURE(dc, VIS1);
3662 cpu_src1 = get_src1(insn, cpu_src1);
3663 gen_movl_reg_TN(rs2, cpu_src2);
3664 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3665 cpu_src2);
3666 gen_movl_TN_reg(rd, cpu_dst);
3667 break;
3668 case 0x019: /* VIS II bmask */
3669 case 0x01a: /* VIS I alignaddrl */
3670 // XXX
3671 goto illegal_insn;
3672 case 0x020: /* VIS I fcmple16 */
3673 CHECK_FPU_FEATURE(dc, VIS1);
3674 gen_op_load_fpr_DT0(DFPREG(rs1));
3675 gen_op_load_fpr_DT1(DFPREG(rs2));
3676 tcg_gen_helper_0_0(helper_fcmple16);
3677 gen_op_store_DT0_fpr(DFPREG(rd));
3678 break;
3679 case 0x022: /* VIS I fcmpne16 */
3680 CHECK_FPU_FEATURE(dc, VIS1);
3681 gen_op_load_fpr_DT0(DFPREG(rs1));
3682 gen_op_load_fpr_DT1(DFPREG(rs2));
3683 tcg_gen_helper_0_0(helper_fcmpne16);
3684 gen_op_store_DT0_fpr(DFPREG(rd));
3685 break;
3686 case 0x024: /* VIS I fcmple32 */
3687 CHECK_FPU_FEATURE(dc, VIS1);
3688 gen_op_load_fpr_DT0(DFPREG(rs1));
3689 gen_op_load_fpr_DT1(DFPREG(rs2));
3690 tcg_gen_helper_0_0(helper_fcmple32);
3691 gen_op_store_DT0_fpr(DFPREG(rd));
3692 break;
3693 case 0x026: /* VIS I fcmpne32 */
3694 CHECK_FPU_FEATURE(dc, VIS1);
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 tcg_gen_helper_0_0(helper_fcmpne32);
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x028: /* VIS I fcmpgt16 */
3701 CHECK_FPU_FEATURE(dc, VIS1);
3702 gen_op_load_fpr_DT0(DFPREG(rs1));
3703 gen_op_load_fpr_DT1(DFPREG(rs2));
3704 tcg_gen_helper_0_0(helper_fcmpgt16);
3705 gen_op_store_DT0_fpr(DFPREG(rd));
3706 break;
3707 case 0x02a: /* VIS I fcmpeq16 */
3708 CHECK_FPU_FEATURE(dc, VIS1);
3709 gen_op_load_fpr_DT0(DFPREG(rs1));
3710 gen_op_load_fpr_DT1(DFPREG(rs2));
3711 tcg_gen_helper_0_0(helper_fcmpeq16);
3712 gen_op_store_DT0_fpr(DFPREG(rd));
3713 break;
3714 case 0x02c: /* VIS I fcmpgt32 */
3715 CHECK_FPU_FEATURE(dc, VIS1);
3716 gen_op_load_fpr_DT0(DFPREG(rs1));
3717 gen_op_load_fpr_DT1(DFPREG(rs2));
3718 tcg_gen_helper_0_0(helper_fcmpgt32);
3719 gen_op_store_DT0_fpr(DFPREG(rd));
3720 break;
3721 case 0x02e: /* VIS I fcmpeq32 */
3722 CHECK_FPU_FEATURE(dc, VIS1);
3723 gen_op_load_fpr_DT0(DFPREG(rs1));
3724 gen_op_load_fpr_DT1(DFPREG(rs2));
3725 tcg_gen_helper_0_0(helper_fcmpeq32);
3726 gen_op_store_DT0_fpr(DFPREG(rd));
3727 break;
3728 case 0x031: /* VIS I fmul8x16 */
3729 CHECK_FPU_FEATURE(dc, VIS1);
3730 gen_op_load_fpr_DT0(DFPREG(rs1));
3731 gen_op_load_fpr_DT1(DFPREG(rs2));
3732 tcg_gen_helper_0_0(helper_fmul8x16);
3733 gen_op_store_DT0_fpr(DFPREG(rd));
3734 break;
3735 case 0x033: /* VIS I fmul8x16au */
3736 CHECK_FPU_FEATURE(dc, VIS1);
3737 gen_op_load_fpr_DT0(DFPREG(rs1));
3738 gen_op_load_fpr_DT1(DFPREG(rs2));
3739 tcg_gen_helper_0_0(helper_fmul8x16au);
3740 gen_op_store_DT0_fpr(DFPREG(rd));
3741 break;
3742 case 0x035: /* VIS I fmul8x16al */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 tcg_gen_helper_0_0(helper_fmul8x16al);
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x036: /* VIS I fmul8sux16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_op_load_fpr_DT0(DFPREG(rs1));
3752 gen_op_load_fpr_DT1(DFPREG(rs2));
3753 tcg_gen_helper_0_0(helper_fmul8sux16);
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x037: /* VIS I fmul8ulx16 */
3757 CHECK_FPU_FEATURE(dc, VIS1);
3758 gen_op_load_fpr_DT0(DFPREG(rs1));
3759 gen_op_load_fpr_DT1(DFPREG(rs2));
3760 tcg_gen_helper_0_0(helper_fmul8ulx16);
3761 gen_op_store_DT0_fpr(DFPREG(rd));
3762 break;
3763 case 0x038: /* VIS I fmuld8sux16 */
3764 CHECK_FPU_FEATURE(dc, VIS1);
3765 gen_op_load_fpr_DT0(DFPREG(rs1));
3766 gen_op_load_fpr_DT1(DFPREG(rs2));
3767 tcg_gen_helper_0_0(helper_fmuld8sux16);
3768 gen_op_store_DT0_fpr(DFPREG(rd));
3769 break;
3770 case 0x039: /* VIS I fmuld8ulx16 */
3771 CHECK_FPU_FEATURE(dc, VIS1);
3772 gen_op_load_fpr_DT0(DFPREG(rs1));
3773 gen_op_load_fpr_DT1(DFPREG(rs2));
3774 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3775 gen_op_store_DT0_fpr(DFPREG(rd));
3776 break;
3777 case 0x03a: /* VIS I fpack32 */
3778 case 0x03b: /* VIS I fpack16 */
3779 case 0x03d: /* VIS I fpackfix */
3780 case 0x03e: /* VIS I pdist */
3781 // XXX
3782 goto illegal_insn;
3783 case 0x048: /* VIS I faligndata */
3784 CHECK_FPU_FEATURE(dc, VIS1);
3785 gen_op_load_fpr_DT0(DFPREG(rs1));
3786 gen_op_load_fpr_DT1(DFPREG(rs2));
3787 tcg_gen_helper_0_0(helper_faligndata);
3788 gen_op_store_DT0_fpr(DFPREG(rd));
3789 break;
3790 case 0x04b: /* VIS I fpmerge */
3791 CHECK_FPU_FEATURE(dc, VIS1);
3792 gen_op_load_fpr_DT0(DFPREG(rs1));
3793 gen_op_load_fpr_DT1(DFPREG(rs2));
3794 tcg_gen_helper_0_0(helper_fpmerge);
3795 gen_op_store_DT0_fpr(DFPREG(rd));
3796 break;
3797 case 0x04c: /* VIS II bshuffle */
3798 // XXX
3799 goto illegal_insn;
3800 case 0x04d: /* VIS I fexpand */
3801 CHECK_FPU_FEATURE(dc, VIS1);
3802 gen_op_load_fpr_DT0(DFPREG(rs1));
3803 gen_op_load_fpr_DT1(DFPREG(rs2));
3804 tcg_gen_helper_0_0(helper_fexpand);
3805 gen_op_store_DT0_fpr(DFPREG(rd));
3806 break;
3807 case 0x050: /* VIS I fpadd16 */
3808 CHECK_FPU_FEATURE(dc, VIS1);
3809 gen_op_load_fpr_DT0(DFPREG(rs1));
3810 gen_op_load_fpr_DT1(DFPREG(rs2));
3811 tcg_gen_helper_0_0(helper_fpadd16);
3812 gen_op_store_DT0_fpr(DFPREG(rd));
3813 break;
3814 case 0x051: /* VIS I fpadd16s */
3815 CHECK_FPU_FEATURE(dc, VIS1);
3816 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3817 cpu_fpr[rs1], cpu_fpr[rs2]);
3818 break;
3819 case 0x052: /* VIS I fpadd32 */
3820 CHECK_FPU_FEATURE(dc, VIS1);
3821 gen_op_load_fpr_DT0(DFPREG(rs1));
3822 gen_op_load_fpr_DT1(DFPREG(rs2));
3823 tcg_gen_helper_0_0(helper_fpadd32);
3824 gen_op_store_DT0_fpr(DFPREG(rd));
3825 break;
3826 case 0x053: /* VIS I fpadd32s */
3827 CHECK_FPU_FEATURE(dc, VIS1);
3828 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3829 cpu_fpr[rs1], cpu_fpr[rs2]);
3830 break;
3831 case 0x054: /* VIS I fpsub16 */
3832 CHECK_FPU_FEATURE(dc, VIS1);
3833 gen_op_load_fpr_DT0(DFPREG(rs1));
3834 gen_op_load_fpr_DT1(DFPREG(rs2));
3835 tcg_gen_helper_0_0(helper_fpsub16);
3836 gen_op_store_DT0_fpr(DFPREG(rd));
3837 break;
3838 case 0x055: /* VIS I fpsub16s */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3841 cpu_fpr[rs1], cpu_fpr[rs2]);
3842 break;
3843 case 0x056: /* VIS I fpsub32 */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 gen_op_load_fpr_DT0(DFPREG(rs1));
3846 gen_op_load_fpr_DT1(DFPREG(rs2));
3847 tcg_gen_helper_0_0(helper_fpsub32);
3848 gen_op_store_DT0_fpr(DFPREG(rd));
3849 break;
3850 case 0x057: /* VIS I fpsub32s */
3851 CHECK_FPU_FEATURE(dc, VIS1);
3852 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3853 cpu_fpr[rs1], cpu_fpr[rs2]);
3854 break;
3855 case 0x060: /* VIS I fzero */
3856 CHECK_FPU_FEATURE(dc, VIS1);
3857 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3858 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3859 break;
3860 case 0x061: /* VIS I fzeros */
3861 CHECK_FPU_FEATURE(dc, VIS1);
3862 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3863 break;
3864 case 0x062: /* VIS I fnor */
3865 CHECK_FPU_FEATURE(dc, VIS1);
3866 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3867 cpu_fpr[DFPREG(rs2)]);
3868 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3869 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3870 cpu_fpr[DFPREG(rs2) + 1]);
3871 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3872 break;
3873 case 0x063: /* VIS I fnors */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3876 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3877 break;
3878 case 0x064: /* VIS I fandnot2 */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3881 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3882 cpu_fpr[DFPREG(rs2)]);
3883 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3884 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3885 cpu_fpr[DFPREG(rs2) + 1]);
3886 break;
3887 case 0x065: /* VIS I fandnot2s */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3890 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3891 break;
3892 case 0x066: /* VIS I fnot2 */
3893 CHECK_FPU_FEATURE(dc, VIS1);
3894 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3895 -1);
3896 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3897 cpu_fpr[DFPREG(rs2) + 1], -1);
3898 break;
3899 case 0x067: /* VIS I fnot2s */
3900 CHECK_FPU_FEATURE(dc, VIS1);
3901 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3902 break;
3903 case 0x068: /* VIS I fandnot1 */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3906 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3907 cpu_fpr[DFPREG(rs1)]);
3908 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3909 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3910 cpu_fpr[DFPREG(rs1) + 1]);
3911 break;
3912 case 0x069: /* VIS I fandnot1s */
3913 CHECK_FPU_FEATURE(dc, VIS1);
3914 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3915 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3916 break;
3917 case 0x06a: /* VIS I fnot1 */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3920 -1);
3921 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3922 cpu_fpr[DFPREG(rs1) + 1], -1);
3923 break;
3924 case 0x06b: /* VIS I fnot1s */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3927 break;
3928 case 0x06c: /* VIS I fxor */
3929 CHECK_FPU_FEATURE(dc, VIS1);
3930 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3931 cpu_fpr[DFPREG(rs2)]);
3932 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3933 cpu_fpr[DFPREG(rs1) + 1],
3934 cpu_fpr[DFPREG(rs2) + 1]);
3935 break;
3936 case 0x06d: /* VIS I fxors */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3939 break;
3940 case 0x06e: /* VIS I fnand */
3941 CHECK_FPU_FEATURE(dc, VIS1);
3942 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3943 cpu_fpr[DFPREG(rs2)]);
3944 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3945 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3946 cpu_fpr[DFPREG(rs2) + 1]);
3947 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3948 break;
3949 case 0x06f: /* VIS I fnands */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3952 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3953 break;
3954 case 0x070: /* VIS I fand */
3955 CHECK_FPU_FEATURE(dc, VIS1);
3956 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3957 cpu_fpr[DFPREG(rs2)]);
3958 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3959 cpu_fpr[DFPREG(rs1) + 1],
3960 cpu_fpr[DFPREG(rs2) + 1]);
3961 break;
3962 case 0x071: /* VIS I fands */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3965 break;
3966 case 0x072: /* VIS I fxnor */
3967 CHECK_FPU_FEATURE(dc, VIS1);
3968 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3969 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3970 cpu_fpr[DFPREG(rs1)]);
3971 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3972 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3973 cpu_fpr[DFPREG(rs1) + 1]);
3974 break;
3975 case 0x073: /* VIS I fxnors */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3978 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3979 break;
3980 case 0x074: /* VIS I fsrc1 */
3981 CHECK_FPU_FEATURE(dc, VIS1);
3982 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3983 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3984 cpu_fpr[DFPREG(rs1) + 1]);
3985 break;
3986 case 0x075: /* VIS I fsrc1s */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3989 break;
3990 case 0x076: /* VIS I fornot2 */
3991 CHECK_FPU_FEATURE(dc, VIS1);
3992 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3993 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3994 cpu_fpr[DFPREG(rs2)]);
3995 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3996 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3997 cpu_fpr[DFPREG(rs2) + 1]);
3998 break;
3999 case 0x077: /* VIS I fornot2s */
4000 CHECK_FPU_FEATURE(dc, VIS1);
4001 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4002 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4003 break;
4004 case 0x078: /* VIS I fsrc2 */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 gen_op_load_fpr_DT0(DFPREG(rs2));
4007 gen_op_store_DT0_fpr(DFPREG(rd));
4008 break;
4009 case 0x079: /* VIS I fsrc2s */
4010 CHECK_FPU_FEATURE(dc, VIS1);
4011 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4012 break;
4013 case 0x07a: /* VIS I fornot1 */
4014 CHECK_FPU_FEATURE(dc, VIS1);
4015 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4016 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4017 cpu_fpr[DFPREG(rs1)]);
4018 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4019 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4020 cpu_fpr[DFPREG(rs1) + 1]);
4021 break;
4022 case 0x07b: /* VIS I fornot1s */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4025 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4026 break;
4027 case 0x07c: /* VIS I for */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4030 cpu_fpr[DFPREG(rs2)]);
4031 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4032 cpu_fpr[DFPREG(rs1) + 1],
4033 cpu_fpr[DFPREG(rs2) + 1]);
4034 break;
4035 case 0x07d: /* VIS I fors */
4036 CHECK_FPU_FEATURE(dc, VIS1);
4037 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4038 break;
4039 case 0x07e: /* VIS I fone */
4040 CHECK_FPU_FEATURE(dc, VIS1);
4041 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4042 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4043 break;
4044 case 0x07f: /* VIS I fones */
4045 CHECK_FPU_FEATURE(dc, VIS1);
4046 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4047 break;
4048 case 0x080: /* VIS I shutdown */
4049 case 0x081: /* VIS II siam */
4050 // XXX
4051 goto illegal_insn;
4052 default:
4053 goto illegal_insn;
4055 #else
4056 goto ncp_insn;
4057 #endif
4058 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4059 #ifdef TARGET_SPARC64
4060 goto illegal_insn;
4061 #else
4062 goto ncp_insn;
4063 #endif
4064 #ifdef TARGET_SPARC64
4065 } else if (xop == 0x39) { /* V9 return */
4066 TCGv r_const;
4068 save_state(dc, cpu_cond);
4069 cpu_src1 = get_src1(insn, cpu_src1);
4070 if (IS_IMM) { /* immediate */
4071 rs2 = GET_FIELDs(insn, 19, 31);
4072 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4073 } else { /* register */
4074 rs2 = GET_FIELD(insn, 27, 31);
4075 if (rs2) {
4076 gen_movl_reg_TN(rs2, cpu_src2);
4077 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4078 } else
4079 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4081 tcg_gen_helper_0_0(helper_restore);
4082 gen_mov_pc_npc(dc, cpu_cond);
4083 r_const = tcg_const_i32(3);
4084 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4085 tcg_temp_free(r_const);
4086 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4087 dc->npc = DYNAMIC_PC;
4088 goto jmp_insn;
4089 #endif
4090 } else {
4091 cpu_src1 = get_src1(insn, cpu_src1);
4092 if (IS_IMM) { /* immediate */
4093 rs2 = GET_FIELDs(insn, 19, 31);
4094 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4095 } else { /* register */
4096 rs2 = GET_FIELD(insn, 27, 31);
4097 if (rs2) {
4098 gen_movl_reg_TN(rs2, cpu_src2);
4099 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4100 } else
4101 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4103 switch (xop) {
4104 case 0x38: /* jmpl */
4106 TCGv r_const;
4108 r_const = tcg_const_tl(dc->pc);
4109 gen_movl_TN_reg(rd, r_const);
4110 tcg_temp_free(r_const);
4111 gen_mov_pc_npc(dc, cpu_cond);
4112 r_const = tcg_const_i32(3);
4113 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4114 r_const);
4115 tcg_temp_free(r_const);
4116 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4117 dc->npc = DYNAMIC_PC;
4119 goto jmp_insn;
4120 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4121 case 0x39: /* rett, V9 return */
4123 TCGv r_const;
4125 if (!supervisor(dc))
4126 goto priv_insn;
4127 gen_mov_pc_npc(dc, cpu_cond);
4128 r_const = tcg_const_i32(3);
4129 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4130 r_const);
4131 tcg_temp_free(r_const);
4132 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4133 dc->npc = DYNAMIC_PC;
4134 tcg_gen_helper_0_0(helper_rett);
4136 goto jmp_insn;
4137 #endif
4138 case 0x3b: /* flush */
4139 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4140 goto unimp_flush;
4141 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4142 break;
4143 case 0x3c: /* save */
4144 save_state(dc, cpu_cond);
4145 tcg_gen_helper_0_0(helper_save);
4146 gen_movl_TN_reg(rd, cpu_dst);
4147 break;
4148 case 0x3d: /* restore */
4149 save_state(dc, cpu_cond);
4150 tcg_gen_helper_0_0(helper_restore);
4151 gen_movl_TN_reg(rd, cpu_dst);
4152 break;
4153 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4154 case 0x3e: /* V9 done/retry */
4156 switch (rd) {
4157 case 0:
4158 if (!supervisor(dc))
4159 goto priv_insn;
4160 dc->npc = DYNAMIC_PC;
4161 dc->pc = DYNAMIC_PC;
4162 tcg_gen_helper_0_0(helper_done);
4163 goto jmp_insn;
4164 case 1:
4165 if (!supervisor(dc))
4166 goto priv_insn;
4167 dc->npc = DYNAMIC_PC;
4168 dc->pc = DYNAMIC_PC;
4169 tcg_gen_helper_0_0(helper_retry);
4170 goto jmp_insn;
4171 default:
4172 goto illegal_insn;
4175 break;
4176 #endif
4177 default:
4178 goto illegal_insn;
4181 break;
4183 break;
4184 case 3: /* load/store instructions */
4186 unsigned int xop = GET_FIELD(insn, 7, 12);
4188 cpu_src1 = get_src1(insn, cpu_src1);
4189 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4190 rs2 = GET_FIELD(insn, 27, 31);
4191 gen_movl_reg_TN(rs2, cpu_src2);
4192 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4193 } else if (IS_IMM) { /* immediate */
4194 rs2 = GET_FIELDs(insn, 19, 31);
4195 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4196 } else { /* register */
4197 rs2 = GET_FIELD(insn, 27, 31);
4198 if (rs2 != 0) {
4199 gen_movl_reg_TN(rs2, cpu_src2);
4200 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4201 } else
4202 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4204 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4205 (xop > 0x17 && xop <= 0x1d ) ||
4206 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4207 switch (xop) {
4208 case 0x0: /* load unsigned word */
4209 gen_address_mask(dc, cpu_addr);
4210 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4211 break;
4212 case 0x1: /* load unsigned byte */
4213 gen_address_mask(dc, cpu_addr);
4214 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4215 break;
4216 case 0x2: /* load unsigned halfword */
4217 gen_address_mask(dc, cpu_addr);
4218 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4219 break;
                case 0x3: /* load double word */
                    /* LDD requires an even destination register; the pair
                       rd/rd+1 receives the 64-bit value. */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv r_const;

                        save_state(dc, cpu_cond);
                        /* Trap if the address is not 8-byte aligned. */
                        r_const = tcg_const_i32(7);
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           r_const); // XXX remove
                        tcg_temp_free(r_const);
                        gen_address_mask(dc, cpu_addr);
                        /* One 64-bit load, then split into the register pair:
                           low 32 bits -> odd register (rd + 1), high 32 bits
                           -> even register (rd), per the big-endian SPARC
                           LDD layout. */
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        /* cpu_val (the even register) is stored by the common
                           gen_movl_TN_reg(rd, cpu_val) after the switch. */
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    break;
4241 case 0x9: /* load signed byte */
4242 gen_address_mask(dc, cpu_addr);
4243 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4244 break;
4245 case 0xa: /* load signed halfword */
4246 gen_address_mask(dc, cpu_addr);
4247 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4248 break;
4249 case 0xd: /* ldstub -- XXX: should be atomically */
4251 TCGv r_const;
4253 gen_address_mask(dc, cpu_addr);
4254 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4255 r_const = tcg_const_tl(0xff);
4256 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4257 tcg_temp_free(r_const);
4259 break;
4260 case 0x0f: /* swap register with memory. Also
4261 atomically */
4262 CHECK_IU_FEATURE(dc, SWAP);
4263 gen_movl_reg_TN(rd, cpu_val);
4264 gen_address_mask(dc, cpu_addr);
4265 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4266 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4267 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4268 break;
4269 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4270 case 0x10: /* load word alternate */
4271 #ifndef TARGET_SPARC64
4272 if (IS_IMM)
4273 goto illegal_insn;
4274 if (!supervisor(dc))
4275 goto priv_insn;
4276 #endif
4277 save_state(dc, cpu_cond);
4278 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4279 break;
4280 case 0x11: /* load unsigned byte alternate */
4281 #ifndef TARGET_SPARC64
4282 if (IS_IMM)
4283 goto illegal_insn;
4284 if (!supervisor(dc))
4285 goto priv_insn;
4286 #endif
4287 save_state(dc, cpu_cond);
4288 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4289 break;
4290 case 0x12: /* load unsigned halfword alternate */
4291 #ifndef TARGET_SPARC64
4292 if (IS_IMM)
4293 goto illegal_insn;
4294 if (!supervisor(dc))
4295 goto priv_insn;
4296 #endif
4297 save_state(dc, cpu_cond);
4298 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4299 break;
4300 case 0x13: /* load double word alternate */
4301 #ifndef TARGET_SPARC64
4302 if (IS_IMM)
4303 goto illegal_insn;
4304 if (!supervisor(dc))
4305 goto priv_insn;
4306 #endif
4307 if (rd & 1)
4308 goto illegal_insn;
4309 save_state(dc, cpu_cond);
4310 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4311 goto skip_move;
4312 case 0x19: /* load signed byte alternate */
4313 #ifndef TARGET_SPARC64
4314 if (IS_IMM)
4315 goto illegal_insn;
4316 if (!supervisor(dc))
4317 goto priv_insn;
4318 #endif
4319 save_state(dc, cpu_cond);
4320 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4321 break;
4322 case 0x1a: /* load signed halfword alternate */
4323 #ifndef TARGET_SPARC64
4324 if (IS_IMM)
4325 goto illegal_insn;
4326 if (!supervisor(dc))
4327 goto priv_insn;
4328 #endif
4329 save_state(dc, cpu_cond);
4330 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4331 break;
4332 case 0x1d: /* ldstuba -- XXX: should be atomically */
4333 #ifndef TARGET_SPARC64
4334 if (IS_IMM)
4335 goto illegal_insn;
4336 if (!supervisor(dc))
4337 goto priv_insn;
4338 #endif
4339 save_state(dc, cpu_cond);
4340 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4341 break;
4342 case 0x1f: /* swap reg with alt. memory. Also
4343 atomically */
4344 CHECK_IU_FEATURE(dc, SWAP);
4345 #ifndef TARGET_SPARC64
4346 if (IS_IMM)
4347 goto illegal_insn;
4348 if (!supervisor(dc))
4349 goto priv_insn;
4350 #endif
4351 save_state(dc, cpu_cond);
4352 gen_movl_reg_TN(rd, cpu_val);
4353 gen_swap_asi(cpu_val, cpu_addr, insn);
4354 break;
4356 #ifndef TARGET_SPARC64
4357 case 0x30: /* ldc */
4358 case 0x31: /* ldcsr */
4359 case 0x33: /* lddc */
4360 goto ncp_insn;
4361 #endif
4362 #endif
4363 #ifdef TARGET_SPARC64
4364 case 0x08: /* V9 ldsw */
4365 gen_address_mask(dc, cpu_addr);
4366 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4367 break;
4368 case 0x0b: /* V9 ldx */
4369 gen_address_mask(dc, cpu_addr);
4370 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4371 break;
4372 case 0x18: /* V9 ldswa */
4373 save_state(dc, cpu_cond);
4374 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4375 break;
4376 case 0x1b: /* V9 ldxa */
4377 save_state(dc, cpu_cond);
4378 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4379 break;
4380 case 0x2d: /* V9 prefetch, no effect */
4381 goto skip_move;
4382 case 0x30: /* V9 ldfa */
4383 save_state(dc, cpu_cond);
4384 gen_ldf_asi(cpu_addr, insn, 4, rd);
4385 goto skip_move;
4386 case 0x33: /* V9 lddfa */
4387 save_state(dc, cpu_cond);
4388 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4389 goto skip_move;
4390 case 0x3d: /* V9 prefetcha, no effect */
4391 goto skip_move;
4392 case 0x32: /* V9 ldqfa */
4393 CHECK_FPU_FEATURE(dc, FLOAT128);
4394 save_state(dc, cpu_cond);
4395 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4396 goto skip_move;
4397 #endif
4398 default:
4399 goto illegal_insn;
4401 gen_movl_TN_reg(rd, cpu_val);
4402 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4403 skip_move: ;
4404 #endif
4405 } else if (xop >= 0x20 && xop < 0x24) {
4406 if (gen_trap_ifnofpu(dc, cpu_cond))
4407 goto jmp_insn;
4408 save_state(dc, cpu_cond);
4409 switch (xop) {
4410 case 0x20: /* load fpreg */
4411 gen_address_mask(dc, cpu_addr);
4412 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4413 break;
4414 case 0x21: /* ldfsr, V9 ldxfsr */
4415 #ifdef TARGET_SPARC64
4416 gen_address_mask(dc, cpu_addr);
4417 if (rd == 1) {
4418 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4419 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4420 } else
4421 #else
4423 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4424 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4426 #endif
4427 break;
4428 case 0x22: /* load quad fpreg */
4430 TCGv r_const;
4432 CHECK_FPU_FEATURE(dc, FLOAT128);
4433 r_const = tcg_const_i32(dc->mem_idx);
4434 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4435 tcg_temp_free(r_const);
4436 gen_op_store_QT0_fpr(QFPREG(rd));
4438 break;
4439 case 0x23: /* load double fpreg */
4441 TCGv r_const;
4443 r_const = tcg_const_i32(dc->mem_idx);
4444 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4445 tcg_temp_free(r_const);
4446 gen_op_store_DT0_fpr(DFPREG(rd));
4448 break;
4449 default:
4450 goto illegal_insn;
4452 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4453 xop == 0xe || xop == 0x1e) {
4454 gen_movl_reg_TN(rd, cpu_val);
4455 switch (xop) {
4456 case 0x4: /* store word */
4457 gen_address_mask(dc, cpu_addr);
4458 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4459 break;
4460 case 0x5: /* store byte */
4461 gen_address_mask(dc, cpu_addr);
4462 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4463 break;
4464 case 0x6: /* store halfword */
4465 gen_address_mask(dc, cpu_addr);
4466 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4467 break;
                case 0x7: /* store double word */
                    /* STD requires an even source register; the pair
                       rd/rd+1 supplies the 64-bit value. */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        /* Trap if the address is not 8-byte aligned. */
                        r_const = tcg_const_i32(7);
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           r_const); // XXX remove
                        tcg_temp_free(r_const);
                        /* Build the 64-bit value with the new concat op:
                           odd register (rd + 1) forms the low 32 bits,
                           cpu_val (even register rd, loaded before the
                           switch) the high 32 bits — matching SPARC's
                           big-endian STD layout. */
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    break;
4485 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4486 case 0x14: /* store word alternate */
4487 #ifndef TARGET_SPARC64
4488 if (IS_IMM)
4489 goto illegal_insn;
4490 if (!supervisor(dc))
4491 goto priv_insn;
4492 #endif
4493 save_state(dc, cpu_cond);
4494 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4495 break;
4496 case 0x15: /* store byte alternate */
4497 #ifndef TARGET_SPARC64
4498 if (IS_IMM)
4499 goto illegal_insn;
4500 if (!supervisor(dc))
4501 goto priv_insn;
4502 #endif
4503 save_state(dc, cpu_cond);
4504 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4505 break;
4506 case 0x16: /* store halfword alternate */
4507 #ifndef TARGET_SPARC64
4508 if (IS_IMM)
4509 goto illegal_insn;
4510 if (!supervisor(dc))
4511 goto priv_insn;
4512 #endif
4513 save_state(dc, cpu_cond);
4514 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4515 break;
4516 case 0x17: /* store double word alternate */
4517 #ifndef TARGET_SPARC64
4518 if (IS_IMM)
4519 goto illegal_insn;
4520 if (!supervisor(dc))
4521 goto priv_insn;
4522 #endif
4523 if (rd & 1)
4524 goto illegal_insn;
4525 else {
4526 save_state(dc, cpu_cond);
4527 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4529 break;
4530 #endif
4531 #ifdef TARGET_SPARC64
4532 case 0x0e: /* V9 stx */
4533 gen_address_mask(dc, cpu_addr);
4534 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4535 break;
4536 case 0x1e: /* V9 stxa */
4537 save_state(dc, cpu_cond);
4538 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4539 break;
4540 #endif
4541 default:
4542 goto illegal_insn;
4544 } else if (xop > 0x23 && xop < 0x28) {
4545 if (gen_trap_ifnofpu(dc, cpu_cond))
4546 goto jmp_insn;
4547 save_state(dc, cpu_cond);
4548 switch (xop) {
4549 case 0x24: /* store fpreg */
4550 gen_address_mask(dc, cpu_addr);
4551 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4552 break;
4553 case 0x25: /* stfsr, V9 stxfsr */
4554 #ifdef TARGET_SPARC64
4555 gen_address_mask(dc, cpu_addr);
4556 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4557 if (rd == 1)
4558 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4559 else {
4560 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4561 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4563 #else
4564 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4565 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4566 #endif
4567 break;
4568 case 0x26:
4569 #ifdef TARGET_SPARC64
4570 /* V9 stqf, store quad fpreg */
4572 TCGv r_const;
4574 CHECK_FPU_FEATURE(dc, FLOAT128);
4575 gen_op_load_fpr_QT0(QFPREG(rd));
4576 r_const = tcg_const_i32(dc->mem_idx);
4577 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4578 tcg_temp_free(r_const);
4580 break;
4581 #else /* !TARGET_SPARC64 */
4582 /* stdfq, store floating point queue */
4583 #if defined(CONFIG_USER_ONLY)
4584 goto illegal_insn;
4585 #else
4586 if (!supervisor(dc))
4587 goto priv_insn;
4588 if (gen_trap_ifnofpu(dc, cpu_cond))
4589 goto jmp_insn;
4590 goto nfq_insn;
4591 #endif
4592 #endif
4593 case 0x27: /* store double fpreg */
4595 TCGv r_const;
4597 gen_op_load_fpr_DT0(DFPREG(rd));
4598 r_const = tcg_const_i32(dc->mem_idx);
4599 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4600 tcg_temp_free(r_const);
4602 break;
4603 default:
4604 goto illegal_insn;
4606 } else if (xop > 0x33 && xop < 0x3f) {
4607 save_state(dc, cpu_cond);
4608 switch (xop) {
4609 #ifdef TARGET_SPARC64
4610 case 0x34: /* V9 stfa */
4611 gen_stf_asi(cpu_addr, insn, 4, rd);
4612 break;
4613 case 0x36: /* V9 stqfa */
4615 TCGv r_const;
4617 CHECK_FPU_FEATURE(dc, FLOAT128);
4618 r_const = tcg_const_i32(7);
4619 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4620 r_const);
4621 tcg_temp_free(r_const);
4622 gen_op_load_fpr_QT0(QFPREG(rd));
4623 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4625 break;
4626 case 0x37: /* V9 stdfa */
4627 gen_op_load_fpr_DT0(DFPREG(rd));
4628 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4629 break;
4630 case 0x3c: /* V9 casa */
4631 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4632 gen_movl_TN_reg(rd, cpu_val);
4633 break;
4634 case 0x3e: /* V9 casxa */
4635 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4636 gen_movl_TN_reg(rd, cpu_val);
4637 break;
4638 #else
4639 case 0x34: /* stc */
4640 case 0x35: /* stcsr */
4641 case 0x36: /* stdcq */
4642 case 0x37: /* stdc */
4643 goto ncp_insn;
4644 #endif
4645 default:
4646 goto illegal_insn;
4649 else
4650 goto illegal_insn;
4652 break;
4654 /* default case for non jump instructions */
4655 if (dc->npc == DYNAMIC_PC) {
4656 dc->pc = DYNAMIC_PC;
4657 gen_op_next_insn();
4658 } else if (dc->npc == JUMP_PC) {
4659 /* we can do a static jump */
4660 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4661 dc->is_br = 1;
4662 } else {
4663 dc->pc = dc->npc;
4664 dc->npc = dc->npc + 4;
4666 jmp_insn:
4667 return;
4668 illegal_insn:
4670 TCGv r_const;
4672 save_state(dc, cpu_cond);
4673 r_const = tcg_const_i32(TT_ILL_INSN);
4674 tcg_gen_helper_0_1(raise_exception, r_const);
4675 tcg_temp_free(r_const);
4676 dc->is_br = 1;
4678 return;
4679 unimp_flush:
4681 TCGv r_const;
4683 save_state(dc, cpu_cond);
4684 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4685 tcg_gen_helper_0_1(raise_exception, r_const);
4686 tcg_temp_free(r_const);
4687 dc->is_br = 1;
4689 return;
4690 #if !defined(CONFIG_USER_ONLY)
4691 priv_insn:
4693 TCGv r_const;
4695 save_state(dc, cpu_cond);
4696 r_const = tcg_const_i32(TT_PRIV_INSN);
4697 tcg_gen_helper_0_1(raise_exception, r_const);
4698 tcg_temp_free(r_const);
4699 dc->is_br = 1;
4701 return;
4702 #endif
4703 nfpu_insn:
4704 save_state(dc, cpu_cond);
4705 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4706 dc->is_br = 1;
4707 return;
4708 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4709 nfq_insn:
4710 save_state(dc, cpu_cond);
4711 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4712 dc->is_br = 1;
4713 return;
4714 #endif
4715 #ifndef TARGET_SPARC64
4716 ncp_insn:
4718 TCGv r_const;
4720 save_state(dc, cpu_cond);
4721 r_const = tcg_const_i32(TT_NCP_INSN);
4722 tcg_gen_helper_0_1(raise_exception, r_const);
4723 tcg_temp_free(r_const);
4724 dc->is_br = 1;
4726 return;
4727 #endif
4730 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4731 int spc, CPUSPARCState *env)
4733 target_ulong pc_start, last_pc;
4734 uint16_t *gen_opc_end;
4735 DisasContext dc1, *dc = &dc1;
4736 int j, lj = -1;
4737 int num_insns;
4738 int max_insns;
4740 memset(dc, 0, sizeof(DisasContext));
4741 dc->tb = tb;
4742 pc_start = tb->pc;
4743 dc->pc = pc_start;
4744 last_pc = dc->pc;
4745 dc->npc = (target_ulong) tb->cs_base;
4746 dc->mem_idx = cpu_mmu_index(env);
4747 dc->def = env->def;
4748 if ((dc->def->features & CPU_FEATURE_FLOAT))
4749 dc->fpu_enabled = cpu_fpu_enabled(env);
4750 else
4751 dc->fpu_enabled = 0;
4752 #ifdef TARGET_SPARC64
4753 dc->address_mask_32bit = env->pstate & PS_AM;
4754 #endif
4755 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4757 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4758 cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
4759 cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
4761 cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
4763 // loads and stores
4764 cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
4765 cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
4767 num_insns = 0;
4768 max_insns = tb->cflags & CF_COUNT_MASK;
4769 if (max_insns == 0)
4770 max_insns = CF_COUNT_MASK;
4771 gen_icount_start();
4772 do {
4773 if (env->nb_breakpoints > 0) {
4774 for(j = 0; j < env->nb_breakpoints; j++) {
4775 if (env->breakpoints[j] == dc->pc) {
4776 if (dc->pc != pc_start)
4777 save_state(dc, cpu_cond);
4778 tcg_gen_helper_0_0(helper_debug);
4779 tcg_gen_exit_tb(0);
4780 dc->is_br = 1;
4781 goto exit_gen_loop;
4785 if (spc) {
4786 if (loglevel > 0)
4787 fprintf(logfile, "Search PC...\n");
4788 j = gen_opc_ptr - gen_opc_buf;
4789 if (lj < j) {
4790 lj++;
4791 while (lj < j)
4792 gen_opc_instr_start[lj++] = 0;
4793 gen_opc_pc[lj] = dc->pc;
4794 gen_opc_npc[lj] = dc->npc;
4795 gen_opc_instr_start[lj] = 1;
4796 gen_opc_icount[lj] = num_insns;
4799 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4800 gen_io_start();
4801 last_pc = dc->pc;
4802 disas_sparc_insn(dc);
4803 num_insns++;
4805 if (dc->is_br)
4806 break;
4807 /* if the next PC is different, we abort now */
4808 if (dc->pc != (last_pc + 4))
4809 break;
4810 /* if we reach a page boundary, we stop generation so that the
4811 PC of a TT_TFAULT exception is always in the right page */
4812 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4813 break;
4814 /* if single step mode, we generate only one instruction and
4815 generate an exception */
4816 if (env->singlestep_enabled) {
4817 tcg_gen_movi_tl(cpu_pc, dc->pc);
4818 tcg_gen_exit_tb(0);
4819 break;
4821 } while ((gen_opc_ptr < gen_opc_end) &&
4822 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4823 num_insns < max_insns);
4825 exit_gen_loop:
4826 tcg_temp_free(cpu_addr);
4827 tcg_temp_free(cpu_val);
4828 tcg_temp_free(cpu_dst);
4829 tcg_temp_free(cpu_tmp64);
4830 tcg_temp_free(cpu_tmp32);
4831 tcg_temp_free(cpu_tmp0);
4832 if (tb->cflags & CF_LAST_IO)
4833 gen_io_end();
4834 if (!dc->is_br) {
4835 if (dc->pc != DYNAMIC_PC &&
4836 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4837 /* static PC and NPC: we can use direct chaining */
4838 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4839 } else {
4840 if (dc->pc != DYNAMIC_PC)
4841 tcg_gen_movi_tl(cpu_pc, dc->pc);
4842 save_npc(dc, cpu_cond);
4843 tcg_gen_exit_tb(0);
4846 gen_icount_end(tb, num_insns);
4847 *gen_opc_ptr = INDEX_op_end;
4848 if (spc) {
4849 j = gen_opc_ptr - gen_opc_buf;
4850 lj++;
4851 while (lj <= j)
4852 gen_opc_instr_start[lj++] = 0;
4853 #if 0
4854 if (loglevel > 0) {
4855 page_dump(logfile);
4857 #endif
4858 gen_opc_jump_pc[0] = dc->jump_pc[0];
4859 gen_opc_jump_pc[1] = dc->jump_pc[1];
4860 } else {
4861 tb->size = last_pc + 4 - pc_start;
4862 tb->icount = num_insns;
4864 #ifdef DEBUG_DISAS
4865 if (loglevel & CPU_LOG_TB_IN_ASM) {
4866 fprintf(logfile, "--------------\n");
4867 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4868 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4869 fprintf(logfile, "\n");
4871 #endif
4874 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4876 gen_intermediate_code_internal(tb, 0, env);
4879 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4881 gen_intermediate_code_internal(tb, 1, env);
4884 void gen_intermediate_code_init(CPUSPARCState *env)
4886 unsigned int i;
4887 static int inited;
4888 static const char * const gregnames[8] = {
4889 NULL, // g0 not used
4890 "g1",
4891 "g2",
4892 "g3",
4893 "g4",
4894 "g5",
4895 "g6",
4896 "g7",
4898 static const char * const fregnames[64] = {
4899 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4900 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4901 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4902 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4903 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4904 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4905 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4906 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4909 /* init various static tables */
4910 if (!inited) {
4911 inited = 1;
4913 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4914 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4915 offsetof(CPUState, regwptr),
4916 "regwptr");
4917 #ifdef TARGET_SPARC64
4918 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4919 TCG_AREG0, offsetof(CPUState, xcc),
4920 "xcc");
4921 cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
4922 TCG_AREG0, offsetof(CPUState, asi),
4923 "asi");
4924 cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
4925 TCG_AREG0, offsetof(CPUState, fprs),
4926 "fprs");
4927 cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
4928 TCG_AREG0, offsetof(CPUState, gsr),
4929 "gsr");
4930 cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4931 TCG_AREG0,
4932 offsetof(CPUState, tick_cmpr),
4933 "tick_cmpr");
4934 cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4935 TCG_AREG0,
4936 offsetof(CPUState, stick_cmpr),
4937 "stick_cmpr");
4938 cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4939 TCG_AREG0,
4940 offsetof(CPUState, hstick_cmpr),
4941 "hstick_cmpr");
4942 cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4943 offsetof(CPUState, hintp),
4944 "hintp");
4945 cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4946 offsetof(CPUState, htba),
4947 "htba");
4948 cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4949 offsetof(CPUState, hver),
4950 "hver");
4951 cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4952 offsetof(CPUState, ssr), "ssr");
4953 cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4954 offsetof(CPUState, version), "ver");
4955 #else
4956 cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
4957 TCG_AREG0, offsetof(CPUState, wim),
4958 "wim");
4959 #endif
4960 cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
4961 TCG_AREG0, offsetof(CPUState, cond),
4962 "cond");
4963 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4964 TCG_AREG0, offsetof(CPUState, cc_src),
4965 "cc_src");
4966 cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4967 offsetof(CPUState, cc_src2),
4968 "cc_src2");
4969 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4970 TCG_AREG0, offsetof(CPUState, cc_dst),
4971 "cc_dst");
4972 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4973 TCG_AREG0, offsetof(CPUState, psr),
4974 "psr");
4975 cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
4976 TCG_AREG0, offsetof(CPUState, fsr),
4977 "fsr");
4978 cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
4979 TCG_AREG0, offsetof(CPUState, pc),
4980 "pc");
4981 cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
4982 TCG_AREG0, offsetof(CPUState, npc),
4983 "npc");
4984 cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
4985 TCG_AREG0, offsetof(CPUState, y), "y");
4986 #ifndef CONFIG_USER_ONLY
4987 cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
4988 TCG_AREG0, offsetof(CPUState, tbr),
4989 "tbr");
4990 #endif
4991 for (i = 1; i < 8; i++)
4992 cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4993 offsetof(CPUState, gregs[i]),
4994 gregnames[i]);
4995 for (i = 0; i < TARGET_FPREGS; i++)
4996 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
4997 offsetof(CPUState, fpr[i]),
4998 fregnames[i]);
5000 /* register helpers */
5002 #undef DEF_HELPER
5003 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5004 #include "helper.h"
5008 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5009 unsigned long searched_pc, int pc_pos, void *puc)
5011 target_ulong npc;
5012 env->pc = gen_opc_pc[pc_pos];
5013 npc = gen_opc_npc[pc_pos];
5014 if (npc == 1) {
5015 /* dynamic NPC: already stored */
5016 } else if (npc == 2) {
5017 target_ulong t2 = (target_ulong)(unsigned long)puc;
5018 /* jump PC: use T2 and the jump targets of the translation */
5019 if (t2)
5020 env->npc = gen_opc_jump_pc[0];
5021 else
5022 env->npc = gen_opc_jump_pc[1];
5023 } else {
5024 env->npc = npc;