/*
 * Alpha emulation cpu translation for qemu.
 *
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "host-utils.h"

#include "qemu-common.h"

/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4 + 21*5 + 10*5 + 21*6];
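/* Sizing note for cpu_reg_names (a sketch of the arithmetic, not a
   comment from the original source): "ir0".."ir9" need 4 bytes each
   including the trailing NUL, "ir10".."ir30" need 5, and the "fir"
   names are one byte longer apiece, hence 10*4 + 21*5 + 10*5 + 21*6. */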
#include "gen-icount.h"
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
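/* Background note: only registers 0-30 are mapped to TCG globals.
   Alpha's R31 and F31 read as zero and ignore writes, so the translator
   special-cases register number 31 instead of backing it with CPU
   state. */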
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}
static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}
static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
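/* Background note: the load-locked pair above emulates Alpha's
   LDL_L/LDQ_L by recording the locked address in cpu_lock before doing
   an ordinary load; the store-conditional generators below compare the
   store address against cpu_lock to decide whether the conditional
   store succeeds. This is a simplified software model of the
   architecture's lock flag. */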
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
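/* Background note: the 'clear' argument implements the LDQ_U/STQ_U
   forms, which zero the low three address bits so that unaligned
   quadwords can be assembled from two aligned accesses. */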
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
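/* Background note: both store-conditional generators follow the same
   branch shape - skip to l1 on address mismatch and write 0 to t0,
   otherwise store and write 1 - and then poison cpu_lock with -1 so a
   repeated STx_C without a fresh LDx_L fails. */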
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear, int local)
{
    TCGv addr;

    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
static always_inline void gen_bcond (DisasContext *ctx, TCGCond cond,
                                     int ra, int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
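/* Background note: Alpha branch displacements are counted in 32-bit
   instruction words, so the target is the already-advanced PC plus
   disp << 2. */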
static always_inline void gen_fbcond (DisasContext *ctx, int opc,
                                      int ra, int32_t disp16)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
    gen_set_label(l2);
    tcg_temp_free(tmp);
}
static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
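/* Background note: gen_cmov branches on the *inverted* condition -
   when the inverse test on ra succeeds, the move into rc is skipped -
   which keeps the generated code to one compare-and-branch plus a
   move. */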
#define FARITH2(name)                                        \
static always_inline void glue(gen_f, name)(int rb, int rc)  \
{                                                            \
    if (unlikely(rc == 31))                                  \
        return;                                              \
                                                             \
    if (rb != 31)                                            \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);      \
    else {                                                   \
        TCGv tmp = tcg_const_i64(0);                         \
        gen_helper_ ## name (cpu_fir[rc], tmp);              \
        tcg_temp_free(tmp);                                  \
    }                                                        \
}
#define FARITH3(name)                                                     \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)      \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                  \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}
#define FCMOV(name)                                                   \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc) \
{                                                                     \
    int l1;                                                           \
    TCGv tmp;                                                         \
                                                                      \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    l1 = gen_new_label();                                             \
    if (ra != 31) {                                                   \
        tmp = tcg_temp_new();                                         \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                       \
    } else {                                                          \
        tmp = tcg_const_i64(0);                                       \
        gen_helper_ ## name (tmp, tmp);                               \
    }                                                                 \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                     \
    if (rb != 31)                                                     \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                    \
    else                                                              \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                             \
    gen_set_label(l1);                                                \
    tcg_temp_free(tmp);                                               \
}
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
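/* Background note: for the EXTxH forms the byte offset taken from rb
   (or the literal) selects a *left* shift of 64 - 8*offset bits,
   placing the field of interest at the high end before the optional
   zero-extension narrows the result; a NULL extension callback (EXTQH)
   keeps the full 64 bits. */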
/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
                                            int islit, uint8_t lit)   \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
    tcg_temp_free(tmp);
}
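/* Background note: Alpha's CMPxx writes 0 or 1 into rc; here that is
   built from a conditional branch over two movi ops. (Later TCG
   versions could express the same thing with a single setcond
   operation.) */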
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
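    /* Field layout summary (standard Alpha encoding, for orientation):
       every format has the opcode in bits 31:26 and ra in 25:21; memory
       forms add rb and a 16-bit displacement, branch forms a 21-bit
       displacement, and operate forms take either rb or an 8-bit
       literal (bit 12 selects which) plus the fn7/fn11 function field
       and rc in bits 4:0. */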
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
              opc, ra, rb, rc, disp16);

    /* CALL_PAL */
    if (palcode >= 0x80 && palcode < 0xC0) {
        /* Unprivileged PAL call */
        gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
    } else if (palcode < 0x40) {
        /* Privileged PAL code */
        if (ctx->mem_idx & 1)
            goto invalid_opc;
        else
            gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
#endif
    } else {
        /* Invalid PAL call */
        goto invalid_opc;
    }
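    /* Background note: this translator maps each PAL call to its own
       exception number, spacing entries (palcode & 0x3F) << 6 apart;
       calls 0x80-0xBF are callable from user mode, 0x00-0x3F only from
       privileged mode. */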
    /* LDA */
    if (likely(ra != 31)) {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
        else
            tcg_gen_movi_i64(cpu_ir[ra], disp16);
    }

    /* LDAH */
    if (likely(ra != 31)) {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
        else
            tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
    }

    /* LDBU */
    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);

    /* LDQ_U */
    gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);

    /* LDWU */
    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);

    /* STW */
    gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);

    /* STB */
    gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);

    /* STQ_U */
    gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
    /* ADDL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit) {
                tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            } else {
                tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* S4ADDL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_addi_i64(tmp, tmp, lit);
            else
                tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* SUBL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else {
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
    }

    /* S4SUBL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_subi_i64(tmp, tmp, lit);
            else
                tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else {
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
    }

    /* CMPBGE */
    gen_cmpbge(ra, rb, rc, islit, lit);

    /* S8ADDL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_addi_i64(tmp, tmp, lit);
            else
                tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* S8SUBL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_subi_i64(tmp, tmp, lit);
            else
                tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else {
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
    }

    /* CMPULT */
    gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);

    /* ADDQ */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* S4ADDQ */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* SUBQ */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* S4SUBQ */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* CMPEQ */
    gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);

    /* S8ADDQ */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* S8SUBQ */
    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
            tcg_temp_free(tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* CMPULE */
    gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);

    /* ADDL/V */
    gen_addlv(ra, rb, rc, islit, lit);

    /* SUBL/V */
    gen_sublv(ra, rb, rc, islit, lit);

    /* CMPLT */
    gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);

    /* ADDQ/V */
    gen_addqv(ra, rb, rc, islit, lit);

    /* SUBQ/V */
    gen_subqv(ra, rb, rc, islit, lit);

    /* CMPLE */
    gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
    /* AND */
    if (likely(rc != 31)) {
        if (ra == 31)
            tcg_gen_movi_i64(cpu_ir[rc], 0);
        else if (islit)
            tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
        else
            tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }

    /* BIC */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
            else
                tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    /* CMOVLBS */
    gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);

    /* CMOVLBC */
    gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);

    /* BIS */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* CMOVEQ */
    gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);

    /* CMOVNE */
    gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);

    /* ORNOT */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
            else
                tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], ~lit);
            else
                tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* XOR */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* CMOVLT */
    gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);

    /* CMOVGE */
    gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);

    /* EQV */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
            else
                tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], ~lit);
            else
                tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    /* AMASK */
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], lit);
        else
            tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        switch (ctx->env->implver) {
        case IMPLVER_2106x:
            /* EV4, EV45, LCA, LCA45 & EV5 */
            break;
        default:
            tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                             ~(uint64_t)ctx->amask);
            break;
        }
    }

    /* CMOVLE */
    gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);

    /* CMOVGT */
    gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);

    /* IMPLVER */
    if (rc != 31)
        tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
    /* MSKBL */
    gen_mskbl(ra, rb, rc, islit, lit);

    /* EXTBL */
    gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);

    /* INSBL */
    gen_insbl(ra, rb, rc, islit, lit);

    /* MSKWL */
    gen_mskwl(ra, rb, rc, islit, lit);

    /* EXTWL */
    gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);

    /* INSWL */
    gen_inswl(ra, rb, rc, islit, lit);

    /* MSKLL */
    gen_mskll(ra, rb, rc, islit, lit);

    /* EXTLL */
    gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);

    /* INSLL */
    gen_insll(ra, rb, rc, islit, lit);

    /* ZAP */
    gen_zap(ra, rb, rc, islit, lit);

    /* ZAPNOT */
    gen_zapnot(ra, rb, rc, islit, lit);

    /* MSKQL */
    gen_mskql(ra, rb, rc, islit, lit);

    /* SRL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
            else {
                TCGv shift = tcg_temp_new();
                tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                tcg_temp_free(shift);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    /* EXTQL */
    gen_ext_l(NULL, ra, rb, rc, islit, lit);

    /* SLL */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
            else {
                TCGv shift = tcg_temp_new();
                tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                tcg_temp_free(shift);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    /* INSQL */
    gen_insql(ra, rb, rc, islit, lit);

    /* SRA */
    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
            else {
                TCGv shift = tcg_temp_new();
                tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                tcg_temp_free(shift);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    /* MSKWH */
    gen_mskwh(ra, rb, rc, islit, lit);

    /* INSWH */
    gen_inswh(ra, rb, rc, islit, lit);

    /* EXTWH */
    gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);

    /* MSKLH */
    gen_msklh(ra, rb, rc, islit, lit);

    /* INSLH */
    gen_inslh(ra, rb, rc, islit, lit);

    /* EXTLH */
    gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);

    /* MSKQH */
    gen_mskqh(ra, rb, rc, islit, lit);

    /* INSQH */
    gen_insqh(ra, rb, rc, islit, lit);

    /* EXTQH */
    gen_ext_h(NULL, ra, rb, rc, islit, lit);
    /* MULL */
    if (likely(rc != 31)) {
        if (ra == 31)
            tcg_gen_movi_i64(cpu_ir[rc], 0);
        else {
            if (islit)
                tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
        }
    }

    /* MULQ */
    if (likely(rc != 31)) {
        if (ra == 31)
            tcg_gen_movi_i64(cpu_ir[rc], 0);
        else if (islit)
            tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        else
            tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }

    /* UMULH */
    gen_umulh(ra, rb, rc, islit, lit);

    /* MULL/V */
    gen_mullv(ra, rb, rc, islit, lit);

    /* MULQ/V */
    gen_mulqv(ra, rb, rc, islit, lit);
    switch (fpfn) { /* f11 & 0x3F */
    case 0x04:
        /* ITOFS */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (ra != 31) {
                TCGv_i32 tmp = tcg_temp_new_i32();
                tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                gen_helper_memory_to_s(cpu_fir[rc], tmp);
                tcg_temp_free_i32(tmp);
            } else
                tcg_gen_movi_i64(cpu_fir[rc], 0);
        }
        break;
    case 0x0A:
        /* SQRTF */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_fsqrtf(rb, rc);
        break;
    case 0x0B:
        /* SQRTS */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_fsqrts(rb, rc);
        break;
    case 0x14:
        /* ITOFF */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (ra != 31) {
                TCGv_i32 tmp = tcg_temp_new_i32();
                tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                gen_helper_memory_to_f(cpu_fir[rc], tmp);
                tcg_temp_free_i32(tmp);
            } else
                tcg_gen_movi_i64(cpu_fir[rc], 0);
        }
        break;
    case 0x24:
        /* ITOFT */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (ra != 31)
                tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
            else
                tcg_gen_movi_i64(cpu_fir[rc], 0);
        }
        break;
    case 0x2A:
        /* SQRTG */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_fsqrtg(rb, rc);
        break;
    case 0x2B:
        /* SQRTT */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_fsqrtt(rb, rc);
        break;
    }
    /* VAX floating point */
    /* XXX: rounding mode and trap are ignored (!) */
    switch (fpfn) { /* f11 & 0x3F */
    case 0x00:
        /* ADDF */
        gen_faddf(ra, rb, rc);
        break;
    case 0x01:
        /* SUBF */
        gen_fsubf(ra, rb, rc);
        break;
    case 0x02:
        /* MULF */
        gen_fmulf(ra, rb, rc);
        break;
    case 0x03:
        /* DIVF */
        gen_fdivf(ra, rb, rc);
        break;
    case 0x20:
        /* ADDG */
        gen_faddg(ra, rb, rc);
        break;
    case 0x21:
        /* SUBG */
        gen_fsubg(ra, rb, rc);
        break;
    case 0x22:
        /* MULG */
        gen_fmulg(ra, rb, rc);
        break;
    case 0x23:
        /* DIVG */
        gen_fdivg(ra, rb, rc);
        break;
    case 0x25:
        /* CMPGEQ */
        gen_fcmpgeq(ra, rb, rc);
        break;
    case 0x26:
        /* CMPGLT */
        gen_fcmpglt(ra, rb, rc);
        break;
    case 0x27:
        /* CMPGLE */
        gen_fcmpgle(ra, rb, rc);
        break;
    }
    /* IEEE floating-point */
    /* XXX: rounding mode and traps are ignored (!) */
    switch (fpfn) { /* f11 & 0x3F */
    case 0x00:
        /* ADDS */
        gen_fadds(ra, rb, rc);
        break;
    case 0x01:
        /* SUBS */
        gen_fsubs(ra, rb, rc);
        break;
    case 0x02:
        /* MULS */
        gen_fmuls(ra, rb, rc);
        break;
    case 0x03:
        /* DIVS */
        gen_fdivs(ra, rb, rc);
        break;
    case 0x20:
        /* ADDT */
        gen_faddt(ra, rb, rc);
        break;
    case 0x21:
        /* SUBT */
        gen_fsubt(ra, rb, rc);
        break;
    case 0x22:
        /* MULT */
        gen_fmult(ra, rb, rc);
        break;
    case 0x23:
        /* DIVT */
        gen_fdivt(ra, rb, rc);
        break;
    case 0x24:
        /* CMPTUN */
        gen_fcmptun(ra, rb, rc);
        break;
    case 0x25:
        /* CMPTEQ */
        gen_fcmpteq(ra, rb, rc);
        break;
    case 0x26:
        /* CMPTLT */
        gen_fcmptlt(ra, rb, rc);
        break;
    case 0x27:
        /* CMPTLE */
        gen_fcmptle(ra, rb, rc);
        break;
    case 0x2C:
        /* XXX: incorrect */
        if (fn11 == 0x2AC || fn11 == 0x6AC) {
            /* CVTST */
            gen_fcvtst(rb, rc);
        } else {
            /* CVTTS */
            gen_fcvtts(rb, rc);
        }
        break;
    }
    /* CPYS; when ra == rb this degenerates to FMOV (or FNOP) */
    if (ra == rb) {
        /* FMOV */
        if (likely(rc != 31)) {
            if (ra == 31)
                tcg_gen_movi_i64(cpu_fir[rc], 0);
            else
                tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
        }
    } else
        gen_fcpys(ra, rb, rc);

    /* CPYSN */
    gen_fcpysn(ra, rb, rc);

    /* CPYSE */
    gen_fcpyse(ra, rb, rc);

    /* MT_FPCR */
    if (likely(ra != 31))
        gen_helper_store_fpcr(cpu_fir[ra]);
    else {
        TCGv tmp = tcg_const_i64(0);
        gen_helper_store_fpcr(tmp);
        tcg_temp_free(tmp);
    }

    /* MF_FPCR */
    if (likely(ra != 31))
        gen_helper_load_fpcr(cpu_fir[ra]);

    /* FCMOVEQ */
    gen_fcmpfeq(ra, rb, rc);

    /* FCMOVNE */
    gen_fcmpfne(ra, rb, rc);

    /* FCMOVLT */
    gen_fcmpflt(ra, rb, rc);

    /* FCMOVGE */
    gen_fcmpfge(ra, rb, rc);

    /* FCMOVLE */
    gen_fcmpfle(ra, rb, rc);

    /* FCMOVGT */
    gen_fcmpfgt(ra, rb, rc);

    /* CVTQL/V */
    gen_fcvtqlv(rb, rc);

    /* CVTQL/SV */
    gen_fcvtqlsv(rb, rc);
    switch ((uint16_t)disp16) {
    case 0x0000:
        /* TRAPB */
        /* No-op. Just exit from the current tb */
        ret = 2;
        break;
    case 0x0400:
        /* EXCB */
        /* No-op. Just exit from the current tb */
        ret = 2;
        break;
    case 0xC000:
        /* RPCC */
        if (ra != 31)
            gen_helper_load_pcc(cpu_ir[ra]);
        break;
    case 0xE000:
        /* RC */
        if (ra != 31)
            gen_helper_rc(cpu_ir[ra]);
        break;
    case 0xF000:
        /* RS */
        if (ra != 31)
            gen_helper_rs(cpu_ir[ra]);
        break;
    }
    /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (!ctx->pal_mode)
        goto invalid_opc;
    if (ra != 31) {
        TCGv tmp = tcg_const_i32(insn & 0xFF);
        gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
        tcg_temp_free(tmp);
    }
#endif
    if (rb != 31)
        tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
    else
        tcg_gen_movi_i64(cpu_pc, 0);
    if (ra != 31)
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    /* Those four jumps only differ by the branch prediction hint */
    ret = 1;
    /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (!ctx->pal_mode)
        goto invalid_opc;
    else {
        TCGv addr = tcg_temp_new();
        if (rb != 31)
            tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
        else
            tcg_gen_movi_i64(addr, disp12);
        switch ((insn >> 12) & 0xF) {
        case 0x0:
            /* Longword physical access (hw_ldl/p) */
            gen_helper_ldl_raw(cpu_ir[ra], addr);
            break;
        case 0x1:
            /* Quadword physical access (hw_ldq/p) */
            gen_helper_ldq_raw(cpu_ir[ra], addr);
            break;
        case 0x2:
            /* Longword physical access with lock (hw_ldl_l/p) */
            gen_helper_ldl_l_raw(cpu_ir[ra], addr);
            break;
        case 0x3:
            /* Quadword physical access with lock (hw_ldq_l/p) */
            gen_helper_ldq_l_raw(cpu_ir[ra], addr);
            break;
        case 0x4:
            /* Longword virtual PTE fetch (hw_ldl/v) */
            tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
            break;
        case 0x5:
            /* Quadword virtual PTE fetch (hw_ldq/v) */
            tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
            break;
        case 0x6:
            /* Invalid */
            goto invalid_opc;
        case 0x7:
            /* Invalid */
            goto invalid_opc;
        case 0x8:
            /* Longword virtual access (hw_ldl) */
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_ldl_raw(cpu_ir[ra], addr);
            break;
        case 0x9:
            /* Quadword virtual access (hw_ldq) */
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_ldq_raw(cpu_ir[ra], addr);
            break;
        case 0xA:
            /* Longword virtual access with protection check (hw_ldl/w) */
            tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
            break;
        case 0xB:
            /* Quadword virtual access with protection check (hw_ldq/w) */
            tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
            break;
        case 0xC:
            /* Longword virtual access with alt access mode (hw_ldl/a) */
            gen_helper_set_alt_mode();
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_ldl_raw(cpu_ir[ra], addr);
            gen_helper_restore_mode();
            break;
        case 0xD:
            /* Quadword virtual access with alt access mode (hw_ldq/a) */
            gen_helper_set_alt_mode();
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_ldq_raw(cpu_ir[ra], addr);
            gen_helper_restore_mode();
            break;
        case 0xE:
            /* Longword virtual access with alternate access mode and
             * protection checks (hw_ldl/wa)
             */
            gen_helper_set_alt_mode();
            gen_helper_ldl_data(cpu_ir[ra], addr);
            gen_helper_restore_mode();
            break;
        case 0xF:
            /* Quadword virtual access with alternate access mode and
             * protection checks (hw_ldq/wa)
             */
            gen_helper_set_alt_mode();
            gen_helper_ldq_data(cpu_ir[ra], addr);
            gen_helper_restore_mode();
            break;
        }
        tcg_temp_free(addr);
    }
#endif
    /* SEXTB */
    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
        else
            tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
    }

    /* SEXTW */
    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
        else
            tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
    }

    /* CTPOP */
    if (!(ctx->amask & AMASK_CIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
        else
            gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
    }

    /* PERR */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* CTLZ */
    if (!(ctx->amask & AMASK_CIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
        else
            gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
    }

    /* CTTZ */
    if (!(ctx->amask & AMASK_CIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
        else
            gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
    }

    /* UNPKBW */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* UNPKBL */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* PKWB */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* PKLB */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MINSB8 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MINSW4 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MINUB8 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MINUW4 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MAXUB8 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MAXUW4 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MAXSB8 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* MAXSW4 */
    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;
    /* XXX: TODO */
    goto invalid_opc;

    /* FTOIT */
    if (!(ctx->amask & AMASK_FIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
        else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    /* FTOIS */
    if (!(ctx->amask & AMASK_FIX))
        goto invalid_opc;
    if (rc != 31) {
        TCGv_i32 tmp1 = tcg_temp_new_i32();
        if (ra != 31)
            gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
        else {
            TCGv tmp2 = tcg_const_i64(0);
            gen_helper_s_to_memory(tmp1, tmp2);
            tcg_temp_free(tmp2);
        }
        tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
        tcg_temp_free_i32(tmp1);
    }
    /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (!ctx->pal_mode)
        goto invalid_opc;
    else {
        TCGv tmp1 = tcg_const_i32(insn & 0xFF);
        if (ra != 31)
            gen_helper_mtpr(tmp1, cpu_ir[ra]);
        else {
            TCGv tmp2 = tcg_const_i64(0);
            gen_helper_mtpr(tmp1, tmp2);
            tcg_temp_free(tmp2);
        }
        tcg_temp_free(tmp1);
    }
#endif
    /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (!ctx->pal_mode)
        goto invalid_opc;
    if (rb == 31) {
        /* "Old" alpha */
        gen_helper_hw_rei();
    } else {
        TCGv tmp;
        if (ra != 31) {
            tmp = tcg_temp_new();
            tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
        } else
            tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
        gen_helper_hw_ret(tmp);
        tcg_temp_free(tmp);
    }
#endif
    /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (!ctx->pal_mode)
        goto invalid_opc;
    else {
        TCGv addr, val;
        addr = tcg_temp_new();
        if (rb != 31)
            tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
        else
            tcg_gen_movi_i64(addr, disp12);
        if (ra != 31)
            val = cpu_ir[ra];
        else {
            val = tcg_temp_new();
            tcg_gen_movi_i64(val, 0);
        }
        switch ((insn >> 12) & 0xF) {
        case 0x0:
            /* Longword physical access */
            gen_helper_stl_raw(val, addr);
            break;
        case 0x1:
            /* Quadword physical access */
            gen_helper_stq_raw(val, addr);
            break;
        case 0x2:
            /* Longword physical access with lock */
            gen_helper_stl_c_raw(val, val, addr);
            break;
        case 0x3:
            /* Quadword physical access with lock */
            gen_helper_stq_c_raw(val, val, addr);
            break;
        case 0x4:
            /* Longword virtual access */
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_stl_raw(val, addr);
            break;
        case 0x5:
            /* Quadword virtual access */
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_stq_raw(val, addr);
            break;
        case 0xC:
            /* Longword virtual access with alternate access mode */
            gen_helper_set_alt_mode();
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_stl_raw(val, addr);
            gen_helper_restore_mode();
            break;
        case 0xD:
            /* Quadword virtual access with alternate access mode */
            gen_helper_set_alt_mode();
            gen_helper_st_virt_to_phys(addr, addr);
            gen_helper_stq_raw(val, addr);
            gen_helper_restore_mode();
            break;
        }
        if (ra == 31)
            tcg_temp_free(val);
        tcg_temp_free(addr);
    }
#endif
    /* LDF */
    gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);

    /* LDG */
    gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);

    /* LDS */
    gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);

    /* LDT */
    gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);

    /* STF */
    gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);

    /* STG */
    gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);

    /* STS */
    gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);

    /* STT */
    gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);

    /* LDL */
    gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);

    /* LDQ */
    gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);

    /* LDL_L */
    gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);

    /* LDQ_L */
    gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);

    /* STL */
    gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);

    /* STQ */
    gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);

    /* STL_C */
    gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);

    /* STQ_C */
    gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
    /* BR */
    if (ra != 31)
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    ret = 1;
    break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp16);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp16);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
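    /* Background note: translate_one's return value steers TB
       termination - any nonzero value ends the loop above, and only
       the cases where cpu_pc has already been written (a taken
       branch/jump, ret == 1, or an exception path, ret == 3) skip the
       final update of cpu_pc here. */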
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}