/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#include "host-utils.h"

#include "qemu-common.h"

/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

static char cpu_reg_names[10*4 + 21*5 + 10*5 + 21*6];
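/* Sizing note: the names "ir0".."ir9" take 4 bytes each including the
   terminating NUL and "ir10".."ir30" take 5, giving 10*4 + 21*5; the
   "fir" names are one byte longer each, giving 10*5 + 21*6. */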
#include "gen-icount.h"
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}
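/* The PC is flushed to the CPU state before raising the exception so
   the handler sees the faulting instruction's address; gen_helper_excp
   then exits the CPU loop, so no code after it in this TB runs. */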
static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}
static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
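/* Load-locked is modelled by remembering the locked address in cpu_lock;
   the matching store-conditional (gen_qemu_stl_c/stq_c below) succeeds
   only if it targets the same address, and always clears the lock.
   Intervening stores from other CPUs are not tracked, so this is weaker
   than real LDx_L/STx_C semantics. */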
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
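/* The 'clear' argument implements the unaligned access variants such as
   LDQ_U/STQ_U: the low three bits of the effective address are cleared
   so the access is always quadword aligned. */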
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
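/* For STL_C/STQ_C the value operand t0 (cpu_ir[ra] at the call site) is
   overwritten with the status: 1 on success, 0 on failure.  cpu_lock is
   reset to -1 on both paths so a repeated STx_C cannot succeed
   spuriously. */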
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear, int local)
{
    TCGv addr;

    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
static always_inline void gen_bcond (DisasContext *ctx,
                                     TCGCond cond,
                                     int ra, int32_t disp21, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Branch-format instructions carry a 21-bit displacement, so disp21
       (not the 16-bit memory-format field) selects the target. */
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}
static always_inline void gen_fbcond (DisasContext *ctx, int opc,
                                      int ra, int32_t disp21)
{
    int l1, l2;
    TCGv tmp, src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
    tcg_temp_free(tmp);
}
static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
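/* gen_bcond, gen_fbcond and gen_cmov above all use the same lowering:
   this TCG version has no conditional-move or set-condition op, so a
   forward brcond skips the code that must not execute and labels mark
   the join points. */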
#define FARITH2(name)                                       \
static always_inline void glue(gen_f, name)(int rb, int rc) \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
#define FARITH3(name)                                                    \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)     \
{                                                                        \
    if (unlikely(rc == 31))                                              \
        return;                                                          \
                                                                         \
    if (ra != 31) {                                                      \
        if (rb != 31)                                                    \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]); \
        else {                                                           \
            TCGv tmp = tcg_const_i64(0);                                 \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);         \
            tcg_temp_free(tmp);                                          \
        }                                                                \
    } else {                                                             \
        TCGv tmp = tcg_const_i64(0);                                     \
        if (rb != 31)                                                    \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);         \
        else                                                             \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                 \
        tcg_temp_free(tmp);                                              \
    }                                                                    \
}
#define FCMOV(name)                                                  \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc) \
{                                                                    \
    int l1;                                                          \
    TCGv tmp;                                                        \
                                                                     \
    if (unlikely(rc == 31))                                          \
        return;                                                      \
                                                                     \
    l1 = gen_new_label();                                            \
    if (ra != 31) {                                                  \
        tmp = tcg_temp_new();                                        \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                      \
    } else {                                                         \
        tmp = tcg_const_i64(0);                                      \
        gen_helper_ ## name (tmp, tmp);                              \
    }                                                                \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                    \
    tcg_temp_free(tmp);                                              \
    if (rb != 31)                                                    \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                   \
    else                                                             \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                            \
    gen_set_label(l1);                                               \
}
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit & 7)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            /* The shift amount is (64 - 8*(rb & 7)) mod 64 so that a
               zero byte offset shifts by 0 rather than 64. */
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit)
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
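/* Worked example: EXTWL r1, r2, r3 with (r2 & 7) == 5 shifts r1 right
   by 40 bits and zero-extends the low 16 bits, extracting the word at
   byte offset 5 of the source quadword.  The tcg_gen_ext_i64 callback
   supplies the final zero-extension and is NULL for the quadword forms,
   which need none. */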
/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
                                            int islit, uint8_t lit)   \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
    tcg_temp_free(tmp);
}
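/* Example: CMPEQ r1, r2, r3 emits a brcond on r1 == r2; on the
   fall-through path cpu_ir[3] is set to 0 and control jumps past the
   movi of 1, which is reached only when the condition holds. */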
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
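    /* Decode summary: opc is bits <31:26>; disp21/disp16/disp12 are the
       sign-extended branch, memory and PALcode-memory displacements;
       fn11 and fn7 select operate-format sub-opcodes; lit is the 8-bit
       operate literal chosen by islit (bit 12).  Because cpu_ir[] has
       no slot for r31, an rb of 31 was folded into the literal form
       with lit == 0 during decoding above. */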
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
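        /* Every longword (*L) form above finishes with
           tcg_gen_ext32s_i64 so 32-bit results are kept in the
           canonical sign-extended representation the Alpha architecture
           requires of longword operations. */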
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
                else
                    gen_helper_amask(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                gen_helper_load_implver(cpu_ir[rc]);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* f11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch */
                gen_helper_ldl_kernel(cpu_ir[ra], addr);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch */
                gen_helper_ldq_kernel(cpu_ir[ra], addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xB:
                /* Quadword virtual access with protection check */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x35:
            /* UNPKBL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;

            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    default:
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
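/* In translate_one above, control-transfer opcodes return ret = 1 (the
   TB ends with cpu_pc already set), PALcode and invalid opcodes return
   ret = 3, and ret = 2 marks instructions such as TRAPB that end the TB
   without changing the PC; the main loop below writes cpu_pc back for
   the latter. */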
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            num_insns >= max_insns) {
            break;
        }
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
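/* The search_pc variant fills the gen_opc_pc/gen_opc_instr_start/
   gen_opc_icount side tables while retranslating, so that a host PC
   inside a TB can be mapped back to a guest PC after a fault; the
   result is consumed by gen_pc_load() below. */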
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}