/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */

#ifdef ALPHA_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
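/* The buffer above is sized for "ir0".."ir9" (10 names of 4 bytes including
   the NUL), "ir10".."ir30" (21 names of 5 bytes), then "fir0".."fir9" and
   "fir10".."fir30" at 5 and 6 bytes each; alpha_translate_init() below
   advances p by exactly these amounts. */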
#include "gen-icount.h"

static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
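/* F, G and S format values are kept in the FP register file in a common
   64-bit representation; the memory_to_* and *_to_memory helpers used by
   the FP load/store generators below convert between that representation
   and the in-memory bit layouts.  Plain 64-bit T/Q accesses need no
   conversion. */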
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
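/* Loads into r31 (or f31) are architecturally prefetch hints and generate
   no code, so gen_load_mem returns early for ra == 31; "clear" masks the
   low three address bits for the unaligned LDQ_U form. */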
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}

static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
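/* Load-locked/store-conditional support: the locked virtual address lives
   in cpu_lock.  The store-conditional generators below compare it with the
   store address, write 1 or 0 into t0 accordingly, and then poison the
   lock with -1.  This is a deliberate simplification of the architected
   lock-flag/locked-block behaviour. */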
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
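/* A local temporary (tcg_temp_local_new) is needed whenever a value must
   survive a branch, which is the case for the store-conditional paths
   above; ordinary TCG temporaries are dead after a brcond. */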
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear, int local)
{
    TCGv addr;

    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
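/* Conditional branch helper: "mask" selects the BLBC/BLBS forms, which
   test only bit 0 of ra instead of comparing the whole register against
   zero. */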
static always_inline void gen_bcond (DisasContext *ctx, TCGCond cond,
                                     int ra, int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}

static always_inline void gen_fbcond (DisasContext *ctx, int opc,
                                      int ra, int32_t disp)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
    tcg_temp_free(tmp);
}

static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
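/* The FARITH2/FARITH3/FCMOV macros below stamp out gen_f<name> wrappers
   that feed one or two FP source registers to the matching helper, routing
   f31 sources through a zero constant. */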
#define FARITH2(name)                                       \
static always_inline void glue(gen_f, name)(int rb, int rc) \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)

#define FARITH3(name)                                                     \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)      \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                  \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)

#define FCMOV(name)                                                   \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)  \
{                                                                     \
    int l1;                                                           \
    TCGv tmp;                                                         \
                                                                      \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    l1 = gen_new_label();                                             \
    if (ra != 31) {                                                   \
        tmp = tcg_temp_new();                                         \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
    } else {                                                          \
        tmp = tcg_const_i64(0);                                       \
        gen_helper_ ## name (tmp, tmp);                               \
    }                                                                 \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
    if (rb != 31)                                                     \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                    \
    else                                                              \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
    gen_set_label(l1);                                                \
    tcg_temp_free(tmp);                                               \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)

/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
                                            int islit, uint8_t lit)   \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(zap)
ARITH3(zapnot)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)

static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
    tcg_temp_free(tmp);
}
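/* Translate a single instruction.  The return value tells the main loop
   how to proceed: 0 keeps translating, any other value ends the current
   translation block (branch taken, TB exit requested, or exception
   generated). */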
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;
    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
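    /*
     * Alpha instruction formats, as decoded above (bit ranges in brackets):
     *   PALcode: opc[31:26] palcode[25:0]
     *   branch:  opc[31:26] ra[25:21] disp21[20:0]
     *   memory:  opc[31:26] ra[25:21] rb[20:16] disp16[15:0]
     *   operate: opc[31:26] ra[25:21] rb[20:16] sbz[15:13] 0[12] fn7[11:5] rc[4:0]
     *            or, with bit 12 set, an 8-bit literal in [20:13] instead of rb.
     */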
682 LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
683 opc
, ra
, rb
, rc
, disp16
);
687 if (palcode
>= 0x80 && palcode
< 0xC0) {
688 /* Unprivileged PAL call */
689 gen_excp(ctx
, EXCP_CALL_PAL
+ ((palcode
& 0x3F) << 6), 0);
690 #if !defined (CONFIG_USER_ONLY)
691 } else if (palcode
< 0x40) {
692 /* Privileged PAL code */
693 if (ctx
->mem_idx
& 1)
696 gen_excp(ctx
, EXCP_CALL_PALP
+ ((palcode
& 0x3F) << 6), 0);
699 /* Invalid PAL call */
727 if (likely(ra
!= 31)) {
729 tcg_gen_addi_i64(cpu_ir
[ra
], cpu_ir
[rb
], disp16
);
731 tcg_gen_movi_i64(cpu_ir
[ra
], disp16
);
736 if (likely(ra
!= 31)) {
738 tcg_gen_addi_i64(cpu_ir
[ra
], cpu_ir
[rb
], disp16
<< 16);
740 tcg_gen_movi_i64(cpu_ir
[ra
], disp16
<< 16);
745 if (!(ctx
->amask
& AMASK_BWX
))
747 gen_load_mem(ctx
, &tcg_gen_qemu_ld8u
, ra
, rb
, disp16
, 0, 0);
751 gen_load_mem(ctx
, &tcg_gen_qemu_ld64
, ra
, rb
, disp16
, 0, 1);
755 if (!(ctx
->amask
& AMASK_BWX
))
757 gen_load_mem(ctx
, &tcg_gen_qemu_ld16u
, ra
, rb
, disp16
, 0, 0);
761 gen_store_mem(ctx
, &tcg_gen_qemu_st16
, ra
, rb
, disp16
, 0, 0, 0);
765 gen_store_mem(ctx
, &tcg_gen_qemu_st8
, ra
, rb
, disp16
, 0, 0, 0);
769 gen_store_mem(ctx
, &tcg_gen_qemu_st64
, ra
, rb
, disp16
, 0, 1, 0);
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                default:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
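    /* 0x12 is the byte-manipulation group: MSKxx zeroes byte lanes, EXTxx
       shifts a field down to bit 0, INSxx shifts a field into position; the
       byte offset comes from rb or the 8-bit literal, scaled by 8 bits. */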
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* f11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        default:
            goto invalid_opc;
        }
        break;
1750 /* HW_MFPR (PALcode) */
1751 #if defined (CONFIG_USER_ONLY)
1757 TCGv tmp
= tcg_const_i32(insn
& 0xFF);
1758 gen_helper_mfpr(cpu_ir
[ra
], tmp
, cpu_ir
[ra
]);
1765 tcg_gen_andi_i64(cpu_pc
, cpu_ir
[rb
], ~3);
1767 tcg_gen_movi_i64(cpu_pc
, 0);
1769 tcg_gen_movi_i64(cpu_ir
[ra
], ctx
->pc
);
1770 /* Those four jumps only differ by the branch prediction hint */
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x35:
            /* UNPKBL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
            case 0x7:
            case 0x8:
            case 0x9:
            case 0xA:
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
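/* Main translation loop: translate_one is called in sequence until it
   requests a stop (nonzero ret), a breakpoint or page boundary is hit, or
   the op buffer / instruction budget is exhausted. */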
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (gen_opc_ptr >= gen_opc_end)
            break;
        if (num_insns >= max_insns)
            break;
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
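/* Both entry points share the worker above; the _pc variant additionally
   records, for each guest instruction, which ops it generated, so that a
   host PC can be mapped back to a guest PC after a fault. */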
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}