/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 */
#include <stdint.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"
#define DO_SINGLE_STEP
#define ALPHA_DEBUG_DISAS
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};
/* global register indexes */
static TCGv cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4 + 21*5 + 10*5 + 21*6];

#include "gen-icount.h"
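
/* Register the fixed TCG globals once: the env pointer, both register
   files, the pc and the lock address.  The names stored in
   cpu_reg_names ("ir0".."ir30", "fir0".."fir30") are only used for TCG
   debug output; the array above is sized for ten one-digit and
   twenty-one two-digit names of each kind, NUL terminators included. */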
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                       offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                        offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, lock), "lock");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"

    done_init = 1;
}
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
    tcg_temp_free(tmp2);
    tcg_temp_free(tmp1);
}
static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
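
/* Loads of the legacy VAX F/G and IEEE S floating-point formats: the
   raw 32- or 64-bit memory image is fetched as an integer, then a
   helper expands it to the 64-bit register representation. */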
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_qemu_ld64(tmp, t1, flags);
    tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
    tcg_temp_free(tmp);
}
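
/* Load-locked: remember the locked address in cpu_lock so that the
   matching store-conditional below can check it. */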
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}
static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
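
/* Common code for all memory loads.  "fp" selects the FP register
   file; "clear" implements the LDQ_U-style zeroing of the three low
   address bits.  R31/F31 destinations make the load a no-op. */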
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new(TCG_TYPE_I64);
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
}
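
/* Store-conditional: the store is performed only if the target address
   still matches cpu_lock, and the lock is invalidated (-1) in either
   case.  Per the Alpha architecture, ra receives 1 on success and 0 on
   failure, which the branch below implements. */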
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    /* Lock still held: do the store and flag success */
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Lock lost: flag failure */
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    /* Lock still held: do the store and flag success */
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Lock lost: flag failure */
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
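
/* Common code for all memory stores; an R31/F31 source stores zero,
   following the hardwired-zero register convention. */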
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear)
{
    TCGv addr = tcg_temp_new(TCG_TYPE_I64);
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
static always_inline void gen_bcond (DisasContext *ctx, TCGCond cond,
                                     int ra, int32_t disp21, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}
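
/* FP conditional branches: the condition cannot be tested inline, so a
   comparison helper first computes it into a temporary. */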
static always_inline void gen_fbcond (DisasContext *ctx, void *func,
                                      int ra, int32_t disp21)
{
    int l1, l2;
    TCGv tmp;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
    } else {
        tmp = tcg_const_i64(0);
        tcg_gen_helper_1_1(func, tmp, tmp);
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
    tcg_temp_free(tmp);
}
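
/* CMOVxx: rc keeps its old value unless ra satisfies the condition, so
   the generated code branches around the move using the inverted
   condition.  "mask" selects the low-bit test used by CMOVLBS/CMOVLBC. */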
static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
static always_inline void gen_farith2 (void *helper,
                                       int rb, int rc)
{
    if (unlikely(rc == 31))
        return;

    if (rb != 31)
        tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
    else {
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
        tcg_temp_free(tmp);
    }
}
static always_inline void gen_farith3 (void *helper,
                                       int ra, int rb, int rc)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (rb != 31)
            tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
        else {
            TCGv tmp = tcg_const_i64(0);
            tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
            tcg_temp_free(tmp);
        }
    } else {
        TCGv tmp = tcg_const_i64(0);
        if (rb != 31)
            tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
        else
            tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
        tcg_temp_free(tmp);
    }
}
static always_inline void gen_fcmov (void *func,
                                     int ra, int rb, int rc)
{
    int l1;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
    } else {
        tmp = tcg_const_i64(0);
        tcg_gen_helper_1_1(func, tmp, tmp);
    }
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
    /* FRc <- FRb when the condition on FRa holds */
    if (rb != 31)
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);
    else
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    gen_set_label(l1);
    tcg_temp_free(tmp);
}
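
/* Byte-extract high: the byte selector comes from the low three bits
   of rb (or of the literal) and the source is shifted left by
   64 - 8 * sel bits.  For example, EXTQH with sel = 6 shifts left by
   16, moving bytes 5..0 of ra into bytes 7..2 of rc. */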
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if ((lit & 7) != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit)
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        else {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
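
/* Generic dispatcher for three-operand helpers: R31 sources read as
   zero and the 8-bit literal replaces rb when islit is set. */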
/* Code to call arith3 helpers */
static always_inline void gen_arith3 (void *helper,
                                      int ra, int rb, int rc,
                                      int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            TCGv tmp = tcg_const_i64(lit);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        } else
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    } else {
        TCGv tmp1 = tcg_const_i64(0);
        if (islit) {
            TCGv tmp2 = tcg_const_i64(lit);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
            tcg_temp_free(tmp2);
        } else
            tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
        tcg_temp_free(tmp1);
    }
}
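
/* Integer compares leave 0 or 1 in rc; the comparison itself is done
   with a conditional branch over the two constant stores. */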
static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
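
/* Translate a single instruction, returning nonzero when the current
   translation block must end.  Alpha instruction fields, as decoded
   below: opcode in bits 31:26, ra in 25:21, rb in 20:16, the 8-bit
   literal in 20:13 with bit 12 as the literal flag, the function codes
   fn11/fn7/fn2 starting at bit 5, and rc in bits 4:0.  For instance,
   the word 0x40220003 decodes as opc 0x10, ra 1, rb 2, rc 3, fn7 0,
   i.e. ADDL r1, r2, r3. */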
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        /* LDWU does not clear the low address bits; only LDQ_U/STQ_U do */
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
                else
                    tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_arith3(helper_insll, ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_arith3(helper_zap, ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_arith3(helper_insql, ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH extracts a longword, so it uses the 32-bit extender */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrtf, rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrts, rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrtg, rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrtt, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_farith3(&helper_addf, ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_farith3(&helper_subf, ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_farith3(&helper_mulf, ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_farith3(&helper_divf, ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
            gen_farith2(&helper_cvtdg, rb, rc);
            break;
        case 0x20:
            /* ADDG */
            gen_farith3(&helper_addg, ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_farith3(&helper_subg, ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_farith3(&helper_mulg, ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_farith3(&helper_divg, ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_farith3(&helper_cmpgeq, ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_farith3(&helper_cmpglt, ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_farith3(&helper_cmpgle, ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_farith2(&helper_cvtgf, rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
            gen_farith2(&helper_cvtgd, rb, rc);
            break;
        case 0x2F:
            /* CVTGQ */
            gen_farith2(&helper_cvtgq, rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_farith2(&helper_cvtqf, rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_farith2(&helper_cvtqg, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_farith3(&helper_adds, ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_farith3(&helper_subs, ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_farith3(&helper_muls, ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_farith3(&helper_divs, ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_farith3(&helper_addt, ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_farith3(&helper_subt, ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_farith3(&helper_mult, ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_farith3(&helper_divt, ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_farith3(&helper_cmptun, ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_farith3(&helper_cmpteq, ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_farith3(&helper_cmptlt, ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_farith3(&helper_cmptle, ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC) {
                /* CVTST */
                gen_farith2(&helper_cvtst, rb, rc);
            } else {
                /* CVTTS */
                gen_farith2(&helper_cvtts, rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_farith2(&helper_cvttq, rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_farith2(&helper_cvtqs, rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_farith2(&helper_cvtqt, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_farith2(&helper_cvtlq, rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_farith3(&helper_cpys, ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_farith3(&helper_cpysn, ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_farith3(&helper_cpyse, ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                tcg_gen_helper_0_1(helper_store_fpcr, cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                tcg_gen_helper_0_1(helper_store_fpcr, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                tcg_gen_helper_1_0(helper_load_fpcr, cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmov(&helper_cmpfeq, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmov(&helper_cmpfne, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmov(&helper_cmpflt, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmov(&helper_cmpfge, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmov(&helper_cmpfle, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmov(&helper_cmpfgt, ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_farith2(&helper_cvtql, rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_farith2(&helper_cvtqlv, rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_farith2(&helper_cvtqlsv, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            tcg_gen_helper_1_2(helper_mfpr, cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        /* Those four jumps only differ by the branch prediction hint */
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new(TCG_TYPE_I64);
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access */
                tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                tcg_gen_helper_0_2(helper_ldl_l_raw, cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                tcg_gen_helper_0_2(helper_ldq_l_raw, cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch */
                tcg_gen_helper_0_2(helper_ldl_kernel, cpu_ir[ra], addr);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch */
                tcg_gen_helper_0_2(helper_ldq_kernel, cpu_ir[ra], addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xB:
                /* Quadword virtual access with protection check */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks
                 */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_0_2(helper_ldl_data, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks
                 */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_0_2(helper_ldq_data, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKBL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
                if (ra != 31)
                    tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                tcg_gen_helper_0_2(helper_mtpr, tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                tcg_gen_helper_0_2(helper_mtpr, tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            tcg_gen_helper_0_0(helper_hw_rei);
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            tcg_gen_helper_0_1(helper_hw_ret, tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new(TCG_TYPE_I64);
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                tcg_gen_helper_0_2(helper_stl_raw, val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                tcg_gen_helper_0_2(helper_stq_raw, val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                tcg_gen_helper_1_2(helper_stl_c_raw, val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                tcg_gen_helper_1_2(helper_stq_c_raw, val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stl_raw, val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stq_raw, val, addr);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stl_raw, val, addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stq_raw, val, addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            default:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31:
        /* FBEQ */
        gen_fbcond(ctx, &helper_cmpfeq, ra, disp21);
        ret = 1;
        break;
    case 0x32:
        /* FBLT */
        gen_fbcond(ctx, &helper_cmpflt, ra, disp21);
        ret = 1;
        break;
    case 0x33:
        /* FBLE */
        gen_fbcond(ctx, &helper_cmpfle, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35:
        /* FBNE */
        gen_fbcond(ctx, &helper_cmpfne, ra, disp21);
        ret = 1;
        break;
    case 0x36:
        /* FBGE */
        gen_fbcond(ctx, &helper_cmpfge, ra, disp21);
        ret = 1;
        break;
    case 0x37:
        /* FBGT */
        gen_fbcond(ctx, &helper_cmpfgt, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
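
/* Translate a block starting at tb->pc, stopping on a control-flow
   change (ret != 0), a page boundary, single-stepping, a breakpoint or
   the icount limit.  With search_pc set, the gen_opc_* side tables are
   filled in so a host PC can be mapped back to a guest PC. */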
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = ctx.pc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            (env->singlestep_enabled) ||
            num_insns >= max_insns) {
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    tcg_gen_helper_0_0(helper_tb_flush);
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
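
/* Allocate and minimally initialize a CPU state; most IPRs start out
   at their architected reset values. */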
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
#if defined (CONFIG_USER_ONLY)
    env->ps = 0x1F00;
#endif
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}