Support out-of-the-tree building of tests
[qemu/mini2440.git] / target-alpha / translate.c
blobe0acba5fa13dc508f00ae08186eb86d7efd6ac9d
1 /*
2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include <stdint.h>
22 #include <stdlib.h>
23 #include <stdio.h>
25 #include "cpu.h"
26 #include "exec-all.h"
27 #include "disas.h"
28 #include "host-utils.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
/* Build-time feature/debug switches for this translator:
 * single-stepping support, NOP emission for UNOPs, instruction-level
 * disassembly logging, and TB-flush support. */
33 #define DO_SINGLE_STEP
34 #define GENERATE_NOP
35 #define ALPHA_DEBUG_DISAS
36 #define DO_TB_FLUSH
/* Per-translation-block decoder state threaded through all gen_* helpers. */
38 typedef struct DisasContext DisasContext;
39 struct DisasContext {
40 uint64_t pc;       /* guest PC of the instruction after the current one */
41 int mem_idx;       /* MMU index selecting the ld/st op variant */
42 #if !defined (CONFIG_USER_ONLY)
43 int pal_mode;      /* non-zero while executing PALcode (system emulation only) */
44 #endif
45 uint32_t amask;    /* CPU AMASK bits: which ISA extensions (BWX/FIX/...) exist */
48 /* global register indexes */
/* TCG globals mapping the guest CPU state: env pointer, the 31 integer
 * registers (ir31 always reads as zero and is special-cased by callers),
 * and the guest PC. */
49 static TCGv cpu_env;
50 static TCGv cpu_ir[31];
51 static TCGv cpu_pc;
53 /* dyngen register indexes */
/* Two scratch "T" registers used by the remaining dyngen-style ops. */
54 static TCGv cpu_T[2];
56 /* register names */
/* Backing storage for the "ir0".."ir30" name strings: 10 names of 4 bytes
 * ("irN\0") plus 21 names of 5 bytes ("irNN\0"). */
57 static char cpu_reg_names[10*4+21*5];
59 #include "gen-icount.h"
/* One-time allocation of the TCG globals declared above.  Guarded by
 * done_init so creating additional CPUs is a no-op. */
61 static void alpha_translate_init(void)
63 int i;
64 char *p;
65 static int done_init = 0;
67 if (done_init)
68 return;
70 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
/* When the 64-bit guest registers do not fit in a host register, back the
 * T0/T1 scratch values by CPUState memory slots instead of host regs. */
72 #if TARGET_LONG_BITS > HOST_LONG_BITS
73 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, t0), "T0");
75 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
76 offsetof(CPUState, t1), "T1");
77 #else
78 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
79 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
80 #endif
/* Name each integer register "irN"; the stride into cpu_reg_names is 4
 * bytes for single-digit N and 5 bytes for two-digit N. */
82 p = cpu_reg_names;
83 for (i = 0; i < 31; i++) {
84 sprintf(p, "ir%d", i);
85 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
86 offsetof(CPUState, ir[i]), p);
87 p += (i < 10) ? 4 : 5;
90 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
91 offsetof(CPUState, pc), "pc");
93 /* register helpers */
/* Re-expand helper.h so each DEF_HELPER line registers its helper with TCG. */
94 #undef DEF_HELPER
95 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
96 #include "helper.h"
98 done_init = 1;
/* Emit a no-op (only when GENERATE_NOP is configured); used for UNOP and
 * the fir31 store/cmov special cases. */
101 static always_inline void gen_op_nop (void)
103 #if defined(GENERATE_NOP)
104 gen_op_no_op();
105 #endif
/* Build a 32-entry dispatch table NAME0..NAME31 of dyngen ops plus a
 * wrapper func(n) that invokes the n-th entry.  (No comments may be placed
 * inside the macro body: each line is a backslash continuation.) */
108 #define GEN32(func, NAME) \
109 static GenOpFunc *NAME ## _table [32] = { \
110 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
111 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
112 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
113 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
114 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
115 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
116 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
117 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
118 }; \
119 static always_inline void func (int n) \
121 NAME ## _table[n](); \
124 /* FIR moves */
125 /* Special hacks for fir31 */
/* fir31 always reads as zero and discards writes, so slot 31 of each table
 * aliases a reset op (for loads) or a NOP (for stores and cmov). */
126 #define gen_op_load_FT0_fir31 gen_op_reset_FT0
127 #define gen_op_load_FT1_fir31 gen_op_reset_FT1
128 #define gen_op_load_FT2_fir31 gen_op_reset_FT2
129 #define gen_op_store_FT0_fir31 gen_op_nop
130 #define gen_op_store_FT1_fir31 gen_op_nop
131 #define gen_op_store_FT2_fir31 gen_op_nop
132 #define gen_op_cmov_fir31 gen_op_nop
/* Instantiate the per-register dispatch tables for FP register moves. */
133 GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
134 GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
135 GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
136 GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
137 GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
138 GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
139 GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
/* Load FP register fir[firn] into scratch FP temporary FTn (Tn in 0..2). */
141 static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
143 switch (Tn) {
144 case 0:
145 gen_op_load_FT0_fir(firn);
146 break;
147 case 1:
148 gen_op_load_FT1_fir(firn);
149 break;
150 case 2:
151 gen_op_load_FT2_fir(firn);
152 break;
/* Store scratch FP temporary FTn (Tn in 0..2) into FP register fir[firn];
 * firn == 31 is a NOP via the table hack above. */
156 static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
158 switch (Tn) {
159 case 0:
160 gen_op_store_FT0_fir(firn);
161 break;
162 case 1:
163 gen_op_store_FT1_fir(firn);
164 break;
165 case 2:
166 gen_op_store_FT2_fir(firn);
167 break;
171 /* Memory moves */
/* Build tables of load/store ops indexed by ctx->mem_idx.  User-only
 * builds have a single "raw" access mode; system builds have one entry
 * per privilege level (kernel/executive/supervisor/user). */
172 #if defined(CONFIG_USER_ONLY)
173 #define OP_LD_TABLE(width) \
174 static GenOpFunc *gen_op_ld##width[] = { \
175 &gen_op_ld##width##_raw, \
177 #define OP_ST_TABLE(width) \
178 static GenOpFunc *gen_op_st##width[] = { \
179 &gen_op_st##width##_raw, \
181 #else
182 #define OP_LD_TABLE(width) \
183 static GenOpFunc *gen_op_ld##width[] = { \
184 &gen_op_ld##width##_kernel, \
185 &gen_op_ld##width##_executive, \
186 &gen_op_ld##width##_supervisor, \
187 &gen_op_ld##width##_user, \
189 #define OP_ST_TABLE(width) \
190 static GenOpFunc *gen_op_st##width[] = { \
191 &gen_op_st##width##_kernel, \
192 &gen_op_st##width##_executive, \
193 &gen_op_st##width##_supervisor, \
194 &gen_op_st##width##_user, \
196 #endif
/* Define gen_ld<width>/gen_st<width> wrappers that pick the op matching the
 * current MMU index.  Instantiated below for: longword (l), quadword (q),
 * locked/conditional variants (l_l, l_c, q_l, q_c), and IEEE S/T floats;
 * the VAX F/G float variants are currently unused. */
198 #define GEN_LD(width) \
199 OP_LD_TABLE(width); \
200 static always_inline void gen_ld##width (DisasContext *ctx) \
202 (*gen_op_ld##width[ctx->mem_idx])(); \
205 #define GEN_ST(width) \
206 OP_ST_TABLE(width); \
207 static always_inline void gen_st##width (DisasContext *ctx) \
209 (*gen_op_st##width[ctx->mem_idx])(); \
212 GEN_LD(l);
213 GEN_ST(l);
214 GEN_LD(q);
215 GEN_ST(q);
216 GEN_LD(l_l);
217 GEN_ST(l_c);
218 GEN_LD(q_l);
219 GEN_ST(q_c);
221 #if 0 /* currently unused */
222 GEN_LD(f);
223 GEN_ST(f);
224 GEN_LD(g);
225 GEN_ST(g);
226 #endif /* 0 */
227 GEN_LD(s);
228 GEN_ST(s);
229 GEN_LD(t);
230 GEN_ST(t);
/* Emit the dyngen conditional-branch op.  The 64-bit fallthrough PC has to
 * be passed as two 32-bit halves because dyngen ops cannot take a 64-bit
 * immediate here (see the #if 0 comment). */
232 static always_inline void _gen_op_bcond (DisasContext *ctx)
234 #if 0 // Qemu does not know how to do this...
235 gen_op_bcond(ctx->pc);
236 #else
237 gen_op_bcond(ctx->pc >> 32, ctx->pc);
238 #endif
/* Raise a guest exception: synchronize cpu_pc with the decoder PC, then
 * call helper_excp(exception, error_code), which does not return to the TB. */
241 static always_inline void gen_excp (DisasContext *ctx,
242 int exception, int error_code)
244 TCGv tmp1, tmp2;
246 tcg_gen_movi_i64(cpu_pc, ctx->pc);
247 tmp1 = tcg_const_i32(exception);
248 tmp2 = tcg_const_i32(error_code);
249 tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
250 tcg_temp_free(tmp2);
251 tcg_temp_free(tmp1);
/* Raise the "opcode reserved for Digital" (illegal instruction) exception. */
254 static always_inline void gen_invalid (DisasContext *ctx)
256 gen_excp(ctx, EXCP_OPCDEC, 0);
/* Generic integer load: compute ir[rb] + disp16 into T0 (rb==31 reads as
 * zero), optionally clear the low 3 bits (LDQ_U-style alignment), run the
 * width-specific load op, then move the result (T1) into ir[ra].
 * ra==31 with zero displacement is the canonical UNOP encoding. */
259 static always_inline void gen_load_mem (DisasContext *ctx,
260 void (*gen_load_op)(DisasContext *ctx),
261 int ra, int rb, int32_t disp16,
262 int clear)
264 if (ra == 31 && disp16 == 0) {
265 /* UNOP */
266 gen_op_nop();
267 } else {
268 if (rb != 31)
269 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
270 else
271 tcg_gen_movi_i64(cpu_T[0], disp16);
272 if (clear)
273 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
274 (*gen_load_op)(ctx);
275 if (ra != 31)
276 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* Generic integer store: address ir[rb] + disp16 in T0 (optionally 8-byte
 * aligned via 'clear'), value ir[ra] (or zero for ra==31) in T1, then run
 * the width-specific store op. */
280 static always_inline void gen_store_mem (DisasContext *ctx,
281 void (*gen_store_op)(DisasContext *ctx),
282 int ra, int rb, int32_t disp16,
283 int clear)
285 if (rb != 31)
286 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
287 else
288 tcg_gen_movi_i64(cpu_T[0], disp16);
289 if (clear)
290 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
291 if (ra != 31)
292 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
293 else
294 tcg_gen_movi_i64(cpu_T[1], 0);
295 (*gen_store_op)(ctx);
/* FP load: address ir[rb] + disp16 in T0, run the FP load op (result lands
 * in FT1), then store FT1 into fir[ra]. */
298 static always_inline void gen_load_fmem (DisasContext *ctx,
299 void (*gen_load_fop)(DisasContext *ctx),
300 int ra, int rb, int32_t disp16)
302 if (rb != 31)
303 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
304 else
305 tcg_gen_movi_i64(cpu_T[0], disp16);
306 (*gen_load_fop)(ctx);
307 gen_store_fir(ctx, ra, 1);
/* FP store: address ir[rb] + disp16 in T0, value fir[ra] loaded into FT1,
 * then run the FP store op. */
310 static always_inline void gen_store_fmem (DisasContext *ctx,
311 void (*gen_store_fop)(DisasContext *ctx),
312 int ra, int rb, int32_t disp16)
314 if (rb != 31)
315 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
316 else
317 tcg_gen_movi_i64(cpu_T[0], disp16);
318 gen_load_fir(ctx, ra, 1);
319 (*gen_store_fop)(ctx);
/* Conditional branch on ir[ra]: if 'mask', test only bit 0 (BLBS/BLBC),
 * otherwise compare the whole register against 0 under 'cond'.  Sets cpu_pc
 * to either the fallthrough PC or PC + 4*disp.  The parameter is named
 * disp16 but callers presumably pass the 21-bit branch displacement here —
 * TODO confirm against translate_one.
 * NOTE(review): (disp16 << 2) left-shifts a possibly negative int before
 * widening; relies on implementation-defined/UB-tolerant behavior. */
322 static always_inline void gen_bcond (DisasContext *ctx,
323 TCGCond cond,
324 int ra, int32_t disp16, int mask)
326 int l1, l2;
328 l1 = gen_new_label();
329 l2 = gen_new_label();
330 if (likely(ra != 31)) {
331 if (mask) {
332 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
333 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
334 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
335 tcg_temp_free(tmp);
336 } else
337 tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
338 } else {
339 /* Very uncommon case - Do not bother to optimize. */
340 TCGv tmp = tcg_const_i64(0);
341 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
342 tcg_temp_free(tmp);
344 tcg_gen_movi_i64(cpu_pc, ctx->pc);
345 tcg_gen_br(l2);
346 gen_set_label(l1);
347 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
348 gen_set_label(l2);
/* FP conditional branch: put the branch target in T1, load fir[ra] into
 * FT0, run the FP test op, then emit the dyngen bcond using ctx->pc as the
 * fallthrough address. */
351 static always_inline void gen_fbcond (DisasContext *ctx,
352 void (*gen_test_op)(void),
353 int ra, int32_t disp16)
355 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
356 gen_load_fir(ctx, ra, 0);
357 (*gen_test_op)();
358 _gen_op_bcond(ctx);
/* Integer conditional move: if the INVERSE condition holds on ir[ra]
 * (bit 0 only when 'mask' is set, for CMOVLBS/CMOVLBC), skip the move;
 * otherwise write lit or ir[rb] into ir[rc].  rc==31 is a no-op. */
361 static always_inline void gen_cmov (DisasContext *ctx,
362 TCGCond inv_cond,
363 int ra, int rb, int rc,
364 int islit, uint8_t lit, int mask)
366 int l1;
368 if (unlikely(rc == 31))
369 return;
371 l1 = gen_new_label();
373 if (ra != 31) {
374 if (mask) {
375 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
376 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
377 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
378 tcg_temp_free(tmp);
379 } else
380 tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
381 } else {
382 /* Very uncommon case - Do not bother to optimize. */
383 TCGv tmp = tcg_const_i64(0);
384 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
385 tcg_temp_free(tmp);
388 if (islit)
389 tcg_gen_movi_i64(cpu_ir[rc], lit);
390 else
391 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
392 gen_set_label(l1);
/* Two-operand FP arithmetic: FT0 = fir[rb]; op(); fir[rc] = FT0. */
395 static always_inline void gen_farith2 (DisasContext *ctx,
396 void (*gen_arith_fop)(void),
397 int rb, int rc)
399 gen_load_fir(ctx, rb, 0);
400 (*gen_arith_fop)();
401 gen_store_fir(ctx, rc, 0);
/* Three-operand FP arithmetic: FT0 = fir[ra], FT1 = fir[rb]; op();
 * fir[rc] = FT0. */
404 static always_inline void gen_farith3 (DisasContext *ctx,
405 void (*gen_arith_fop)(void),
406 int ra, int rb, int rc)
408 gen_load_fir(ctx, ra, 0);
409 gen_load_fir(ctx, rb, 1);
410 (*gen_arith_fop)();
411 gen_store_fir(ctx, rc, 0);
/* FP conditional move: test fir[ra]/fir[rb] with the FP test op, then let
 * the cmov op conditionally update fir[rc] (rc==31 is a NOP via the table). */
414 static always_inline void gen_fcmov (DisasContext *ctx,
415 void (*gen_test_fop)(void),
416 int ra, int rb, int rc)
418 gen_load_fir(ctx, ra, 0);
419 gen_load_fir(ctx, rb, 1);
420 (*gen_test_fop)();
421 gen_op_cmov_fir(rc);
/* FP-to-integer register move (FTOIS/FTOIT family): load fir[rc] into FT0,
 * run the conversion op (result in T0), write T0 to ir[ra].
 * NOTE(review): here rc is the SOURCE and ra the destination — opposite of
 * most helpers in this file; verify against the callers. */
424 static always_inline void gen_fti (DisasContext *ctx,
425 void (*gen_move_fop)(void),
426 int ra, int rc)
428 gen_load_fir(ctx, rc, 0);
429 (*gen_move_fop)();
430 if (ra != 31)
431 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
/* Integer-to-FP register move (ITOFS/ITOFT family): T0 = ir[ra] (or zero
 * for ra==31); run the conversion op; store FT0 into fir[rc]. */
434 static always_inline void gen_itf (DisasContext *ctx,
435 void (*gen_move_fop)(void),
436 int ra, int rc)
438 if (ra != 31)
439 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
440 else
441 tcg_gen_movi_i64(cpu_T[0], 0);
442 (*gen_move_fop)();
443 gen_store_fir(ctx, rc, 0);
446 /* EXTWH, EXTLH, EXTQH */
/* "Extract high" byte-manipulation insns: shift ir[ra] LEFT by
 * 64 - 8*(shift&7), where the shift count comes from the literal or from
 * the low bits of ir[rb], then apply the optional zero-extension callback
 * (NULL for the quadword variant).  rc==31 discards; ra==31 yields 0.
 * The lit==0 case is special-cased because a 64-bit shift by 64 is invalid. */
447 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
448 int ra, int rb, int rc,
449 int islit, uint8_t lit)
451 if (unlikely(rc == 31))
452 return;
454 if (ra != 31) {
455 if (islit) {
456 if (lit != 0)
457 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
458 else
459 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
460 } else {
461 TCGv tmp1, tmp2;
462 tmp1 = tcg_temp_new(TCG_TYPE_I64);
463 tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
464 tcg_gen_shli_i64(tmp1, tmp1, 3);
465 tmp2 = tcg_const_i64(64);
466 tcg_gen_sub_i64(tmp1, tmp2, tmp1);
467 tcg_temp_free(tmp2);
468 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
469 tcg_temp_free(tmp1);
471 if (tcg_gen_ext_i64)
472 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
473 } else
474 tcg_gen_movi_i64(cpu_ir[rc], 0);
477 /* EXTBL, EXTWL, EXTLL, EXTQL */
/* "Extract low" byte-manipulation insns: shift ir[ra] RIGHT by 8*(shift&7)
 * (shift from the literal or the low bits of ir[rb]) and apply the optional
 * zero-extension callback (NULL for the quadword variant).
 * rc==31 discards; ra==31 yields 0. */
478 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
479 int ra, int rb, int rc,
480 int islit, uint8_t lit)
482 if (unlikely(rc == 31))
483 return;
485 if (ra != 31) {
486 if (islit) {
487 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
488 } else {
489 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
490 tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
491 tcg_gen_shli_i64(tmp, tmp, 3);
492 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
493 tcg_temp_free(tmp);
495 if (tcg_gen_ext_i64)
496 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
497 } else
498 tcg_gen_movi_i64(cpu_ir[rc], 0);
501 /* Code to call arith3 helpers */
/* Emit ir[rc] = helper(ir[ra] or 0, lit or ir[rb]): the common shape for
 * all 3-operand instructions implemented as C helpers (CMPBGE, ZAP,
 * MSK*/INS*, MUL*V, ...).  rc==31 discards the result entirely. */
502 static always_inline void gen_arith3_helper(void *helper,
503 int ra, int rb, int rc,
504 int islit, uint8_t lit)
506 if (unlikely(rc == 31))
507 return;
509 if (ra != 31) {
510 if (islit) {
511 TCGv tmp = tcg_const_i64(lit);
512 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
513 tcg_temp_free(tmp);
514 } else
515 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
516 } else {
517 TCGv tmp1 = tcg_const_i64(0);
518 if (islit) {
519 TCGv tmp2 = tcg_const_i64(lit);
520 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
521 tcg_temp_free(tmp2);
522 } else
523 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
524 tcg_temp_free(tmp1);
/* Integer compare (CMPEQ/CMPLT/...): ir[rc] = (ir[ra] <cond> rhs) ? 1 : 0,
 * where rhs is the literal or ir[rb], and ra==31 compares zero.
 * NOTE(review): 'tmp' is never tcg_temp_free'd on either path — looks like
 * a temporary leak; confirm against later upstream fixes. */
528 static always_inline void gen_cmp(TCGCond cond,
529 int ra, int rb, int rc,
530 int islit, uint8_t lit)
532 int l1, l2;
533 TCGv tmp;
535 if (unlikely(rc == 31))
536 return;
538 l1 = gen_new_label();
539 l2 = gen_new_label();
541 if (ra != 31) {
542 tmp = tcg_temp_new(TCG_TYPE_I64);
543 tcg_gen_mov_i64(tmp, cpu_ir[ra]);
544 } else
545 tmp = tcg_const_i64(0);
546 if (islit)
547 tcg_gen_brcondi_i64(cond, tmp, lit, l1);
548 else
549 tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
551 tcg_gen_movi_i64(cpu_ir[rc], 0);
552 tcg_gen_br(l2);
553 gen_set_label(l1);
554 tcg_gen_movi_i64(cpu_ir[rc], 1);
555 gen_set_label(l2);
558 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
560 uint32_t palcode;
561 int32_t disp21, disp16, disp12;
562 uint16_t fn11, fn16;
563 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
564 uint8_t lit;
565 int ret;
567 /* Decode all instruction fields */
568 opc = insn >> 26;
569 ra = (insn >> 21) & 0x1F;
570 rb = (insn >> 16) & 0x1F;
571 rc = insn & 0x1F;
572 sbz = (insn >> 13) & 0x07;
573 islit = (insn >> 12) & 1;
574 if (rb == 31 && !islit) {
575 islit = 1;
576 lit = 0;
577 } else
578 lit = (insn >> 13) & 0xFF;
579 palcode = insn & 0x03FFFFFF;
580 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
581 disp16 = (int16_t)(insn & 0x0000FFFF);
582 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
583 fn16 = insn & 0x0000FFFF;
584 fn11 = (insn >> 5) & 0x000007FF;
585 fpfn = fn11 & 0x3F;
586 fn7 = (insn >> 5) & 0x0000007F;
587 fn2 = (insn >> 5) & 0x00000003;
588 ret = 0;
589 #if defined ALPHA_DEBUG_DISAS
590 if (logfile != NULL) {
591 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
592 opc, ra, rb, rc, disp16);
594 #endif
595 switch (opc) {
596 case 0x00:
597 /* CALL_PAL */
598 if (palcode >= 0x80 && palcode < 0xC0) {
599 /* Unprivileged PAL call */
600 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
601 #if !defined (CONFIG_USER_ONLY)
602 } else if (palcode < 0x40) {
603 /* Privileged PAL code */
604 if (ctx->mem_idx & 1)
605 goto invalid_opc;
606 else
607 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
608 #endif
609 } else {
610 /* Invalid PAL call */
611 goto invalid_opc;
613 ret = 3;
614 break;
615 case 0x01:
616 /* OPC01 */
617 goto invalid_opc;
618 case 0x02:
619 /* OPC02 */
620 goto invalid_opc;
621 case 0x03:
622 /* OPC03 */
623 goto invalid_opc;
624 case 0x04:
625 /* OPC04 */
626 goto invalid_opc;
627 case 0x05:
628 /* OPC05 */
629 goto invalid_opc;
630 case 0x06:
631 /* OPC06 */
632 goto invalid_opc;
633 case 0x07:
634 /* OPC07 */
635 goto invalid_opc;
636 case 0x08:
637 /* LDA */
638 if (likely(ra != 31)) {
639 if (rb != 31)
640 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
641 else
642 tcg_gen_movi_i64(cpu_ir[ra], disp16);
644 break;
645 case 0x09:
646 /* LDAH */
647 if (likely(ra != 31)) {
648 if (rb != 31)
649 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
650 else
651 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
653 break;
654 case 0x0A:
655 /* LDBU */
656 if (!(ctx->amask & AMASK_BWX))
657 goto invalid_opc;
658 if (likely(ra != 31)) {
659 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
660 if (rb != 31)
661 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
662 else
663 tcg_gen_movi_i64(addr, disp16);
664 tcg_gen_qemu_ld8u(cpu_ir[ra], addr, ctx->mem_idx);
665 tcg_temp_free(addr);
667 break;
668 case 0x0B:
669 /* LDQ_U */
670 if (likely(ra != 31)) {
671 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
672 if (rb != 31) {
673 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
674 tcg_gen_andi_i64(addr, addr, ~0x7);
675 } else
676 tcg_gen_movi_i64(addr, disp16 & ~0x7);
677 tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
678 tcg_temp_free(addr);
680 break;
681 case 0x0C:
682 /* LDWU */
683 if (!(ctx->amask & AMASK_BWX))
684 goto invalid_opc;
685 if (likely(ra != 31)) {
686 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
687 if (rb != 31)
688 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
689 else
690 tcg_gen_movi_i64(addr, disp16);
691 tcg_gen_qemu_ld16u(cpu_ir[ra], addr, ctx->mem_idx);
692 tcg_temp_free(addr);
694 break;
695 case 0x0D:
696 /* STW */
698 TCGv addr;
699 if (!(ctx->amask & AMASK_BWX))
700 goto invalid_opc;
701 addr = tcg_temp_new(TCG_TYPE_I64);
702 if (rb != 31)
703 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
704 else
705 tcg_gen_movi_i64(addr, disp16);
706 if (ra != 31)
707 tcg_gen_qemu_st16(cpu_ir[ra], addr, ctx->mem_idx);
708 else {
709 TCGv zero = tcg_const_i64(0);
710 tcg_gen_qemu_st16(zero, addr, ctx->mem_idx);
711 tcg_temp_free(zero);
713 tcg_temp_free(addr);
715 break;
716 case 0x0E:
717 /* STB */
719 TCGv addr;
720 if (!(ctx->amask & AMASK_BWX))
721 goto invalid_opc;
722 addr = tcg_temp_new(TCG_TYPE_I64);
723 if (rb != 31)
724 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
725 else
726 tcg_gen_movi_i64(addr, disp16);
727 if (ra != 31)
728 tcg_gen_qemu_st8(cpu_ir[ra], addr, ctx->mem_idx);
729 else {
730 TCGv zero = tcg_const_i64(0);
731 tcg_gen_qemu_st8(zero, addr, ctx->mem_idx);
732 tcg_temp_free(zero);
734 tcg_temp_free(addr);
736 break;
737 case 0x0F:
738 /* STQ_U */
740 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
741 if (rb != 31) {
742 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
743 tcg_gen_andi_i64(addr, addr, ~0x7);
744 } else
745 tcg_gen_movi_i64(addr, disp16 & ~0x7);
746 if (ra != 31)
747 tcg_gen_qemu_st64(cpu_ir[ra], addr, ctx->mem_idx);
748 else {
749 TCGv zero = tcg_const_i64(0);
750 tcg_gen_qemu_st64(zero, addr, ctx->mem_idx);
751 tcg_temp_free(zero);
753 tcg_temp_free(addr);
755 break;
756 case 0x10:
757 switch (fn7) {
758 case 0x00:
759 /* ADDL */
760 if (likely(rc != 31)) {
761 if (ra != 31) {
762 if (islit) {
763 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
764 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
765 } else {
766 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
767 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
769 } else {
770 if (islit)
771 tcg_gen_movi_i64(cpu_ir[rc], lit);
772 else
773 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
776 break;
777 case 0x02:
778 /* S4ADDL */
779 if (likely(rc != 31)) {
780 if (ra != 31) {
781 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
782 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
783 if (islit)
784 tcg_gen_addi_i64(tmp, tmp, lit);
785 else
786 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
787 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
788 tcg_temp_free(tmp);
789 } else {
790 if (islit)
791 tcg_gen_movi_i64(cpu_ir[rc], lit);
792 else
793 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
796 break;
797 case 0x09:
798 /* SUBL */
799 if (likely(rc != 31)) {
800 if (ra != 31) {
801 if (islit)
802 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
803 else
804 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
805 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
806 } else {
807 if (islit)
808 tcg_gen_movi_i64(cpu_ir[rc], -lit);
809 else {
810 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
811 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
814 break;
815 case 0x0B:
816 /* S4SUBL */
817 if (likely(rc != 31)) {
818 if (ra != 31) {
819 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
820 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
821 if (islit)
822 tcg_gen_subi_i64(tmp, tmp, lit);
823 else
824 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
825 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
826 tcg_temp_free(tmp);
827 } else {
828 if (islit)
829 tcg_gen_movi_i64(cpu_ir[rc], -lit);
830 else {
831 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
832 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
836 break;
837 case 0x0F:
838 /* CMPBGE */
839 gen_arith3_helper(helper_cmpbge, ra, rb, rc, islit, lit);
840 break;
841 case 0x12:
842 /* S8ADDL */
843 if (likely(rc != 31)) {
844 if (ra != 31) {
845 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
846 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
847 if (islit)
848 tcg_gen_addi_i64(tmp, tmp, lit);
849 else
850 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
851 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
852 tcg_temp_free(tmp);
853 } else {
854 if (islit)
855 tcg_gen_movi_i64(cpu_ir[rc], lit);
856 else
857 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
860 break;
861 case 0x1B:
862 /* S8SUBL */
863 if (likely(rc != 31)) {
864 if (ra != 31) {
865 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
866 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
867 if (islit)
868 tcg_gen_subi_i64(tmp, tmp, lit);
869 else
870 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
871 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
872 tcg_temp_free(tmp);
873 } else {
874 if (islit)
875 tcg_gen_movi_i64(cpu_ir[rc], -lit);
876 else
877 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
878 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
882 break;
883 case 0x1D:
884 /* CMPULT */
885 gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
886 break;
887 case 0x20:
888 /* ADDQ */
889 if (likely(rc != 31)) {
890 if (ra != 31) {
891 if (islit)
892 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
893 else
894 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
895 } else {
896 if (islit)
897 tcg_gen_movi_i64(cpu_ir[rc], lit);
898 else
899 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
902 break;
903 case 0x22:
904 /* S4ADDQ */
905 if (likely(rc != 31)) {
906 if (ra != 31) {
907 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
908 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
909 if (islit)
910 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
911 else
912 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
913 tcg_temp_free(tmp);
914 } else {
915 if (islit)
916 tcg_gen_movi_i64(cpu_ir[rc], lit);
917 else
918 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
921 break;
922 case 0x29:
923 /* SUBQ */
924 if (likely(rc != 31)) {
925 if (ra != 31) {
926 if (islit)
927 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
928 else
929 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
930 } else {
931 if (islit)
932 tcg_gen_movi_i64(cpu_ir[rc], -lit);
933 else
934 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
937 break;
938 case 0x2B:
939 /* S4SUBQ */
940 if (likely(rc != 31)) {
941 if (ra != 31) {
942 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
943 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
944 if (islit)
945 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
946 else
947 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
948 tcg_temp_free(tmp);
949 } else {
950 if (islit)
951 tcg_gen_movi_i64(cpu_ir[rc], -lit);
952 else
953 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
956 break;
957 case 0x2D:
958 /* CMPEQ */
959 gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
960 break;
961 case 0x32:
962 /* S8ADDQ */
963 if (likely(rc != 31)) {
964 if (ra != 31) {
965 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
966 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
967 if (islit)
968 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
969 else
970 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
971 tcg_temp_free(tmp);
972 } else {
973 if (islit)
974 tcg_gen_movi_i64(cpu_ir[rc], lit);
975 else
976 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
979 break;
980 case 0x3B:
981 /* S8SUBQ */
982 if (likely(rc != 31)) {
983 if (ra != 31) {
984 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
985 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
986 if (islit)
987 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
988 else
989 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
990 tcg_temp_free(tmp);
991 } else {
992 if (islit)
993 tcg_gen_movi_i64(cpu_ir[rc], -lit);
994 else
995 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
998 break;
999 case 0x3D:
1000 /* CMPULE */
1001 gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
1002 break;
1003 case 0x40:
1004 /* ADDL/V */
1005 gen_arith3_helper(helper_addlv, ra, rb, rc, islit, lit);
1006 break;
1007 case 0x49:
1008 /* SUBL/V */
1009 gen_arith3_helper(helper_sublv, ra, rb, rc, islit, lit);
1010 break;
1011 case 0x4D:
1012 /* CMPLT */
1013 gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
1014 break;
1015 case 0x60:
1016 /* ADDQ/V */
1017 gen_arith3_helper(helper_addqv, ra, rb, rc, islit, lit);
1018 break;
1019 case 0x69:
1020 /* SUBQ/V */
1021 gen_arith3_helper(helper_subqv, ra, rb, rc, islit, lit);
1022 break;
1023 case 0x6D:
1024 /* CMPLE */
1025 gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1026 break;
1027 default:
1028 goto invalid_opc;
1030 break;
1031 case 0x11:
1032 switch (fn7) {
1033 case 0x00:
1034 /* AND */
1035 if (likely(rc != 31)) {
1036 if (ra == 31)
1037 tcg_gen_movi_i64(cpu_ir[rc], 0);
1038 else if (islit)
1039 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1040 else
1041 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1043 break;
1044 case 0x08:
1045 /* BIC */
1046 if (likely(rc != 31)) {
1047 if (ra != 31) {
1048 if (islit)
1049 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1050 else {
1051 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1052 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1053 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1054 tcg_temp_free(tmp);
1056 } else
1057 tcg_gen_movi_i64(cpu_ir[rc], 0);
1059 break;
1060 case 0x14:
1061 /* CMOVLBS */
1062 gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1063 break;
1064 case 0x16:
1065 /* CMOVLBC */
1066 gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1067 break;
1068 case 0x20:
1069 /* BIS */
1070 if (likely(rc != 31)) {
1071 if (ra != 31) {
1072 if (islit)
1073 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1074 else
1075 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1076 } else {
1077 if (islit)
1078 tcg_gen_movi_i64(cpu_ir[rc], lit);
1079 else
1080 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1083 break;
1084 case 0x24:
1085 /* CMOVEQ */
1086 gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1087 break;
1088 case 0x26:
1089 /* CMOVNE */
1090 gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1091 break;
1092 case 0x28:
1093 /* ORNOT */
1094 if (likely(rc != 31)) {
1095 if (ra != 31) {
1096 if (islit)
1097 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1098 else {
1099 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1100 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1101 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1102 tcg_temp_free(tmp);
1104 } else {
1105 if (islit)
1106 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1107 else
1108 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1111 break;
1112 case 0x40:
1113 /* XOR */
1114 if (likely(rc != 31)) {
1115 if (ra != 31) {
1116 if (islit)
1117 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1118 else
1119 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1120 } else {
1121 if (islit)
1122 tcg_gen_movi_i64(cpu_ir[rc], lit);
1123 else
1124 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1127 break;
1128 case 0x44:
1129 /* CMOVLT */
1130 gen_cmov(ctx, TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1131 break;
1132 case 0x46:
1133 /* CMOVGE */
1134 gen_cmov(ctx, TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1135 break;
1136 case 0x48:
1137 /* EQV */
1138 if (likely(rc != 31)) {
1139 if (ra != 31) {
1140 if (islit)
1141 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1142 else {
1143 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1144 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1145 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1146 tcg_temp_free(tmp);
1148 } else {
1149 if (islit)
1150 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1151 else
1152 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1155 break;
1156 case 0x61:
1157 /* AMASK */
1158 if (likely(rc != 31)) {
1159 if (islit)
1160 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1161 else
1162 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1164 break;
1165 case 0x64:
1166 /* CMOVLE */
1167 gen_cmov(ctx, TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1168 break;
1169 case 0x66:
1170 /* CMOVGT */
1171 gen_cmov(ctx, TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1172 break;
1173 case 0x6C:
1174 /* IMPLVER */
1175 if (rc != 31)
1176 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
1177 break;
1178 default:
1179 goto invalid_opc;
1181 break;
1182 case 0x12:
1183 switch (fn7) {
1184 case 0x02:
1185 /* MSKBL */
1186 gen_arith3_helper(helper_mskbl, ra, rb, rc, islit, lit);
1187 break;
1188 case 0x06:
1189 /* EXTBL */
1190 gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1191 break;
1192 case 0x0B:
1193 /* INSBL */
1194 gen_arith3_helper(helper_insbl, ra, rb, rc, islit, lit);
1195 break;
1196 case 0x12:
1197 /* MSKWL */
1198 gen_arith3_helper(helper_mskwl, ra, rb, rc, islit, lit);
1199 break;
1200 case 0x16:
1201 /* EXTWL */
1202 gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1203 break;
1204 case 0x1B:
1205 /* INSWL */
1206 gen_arith3_helper(helper_inswl, ra, rb, rc, islit, lit);
1207 break;
1208 case 0x22:
1209 /* MSKLL */
1210 gen_arith3_helper(helper_mskll, ra, rb, rc, islit, lit);
1211 break;
1212 case 0x26:
1213 /* EXTLL */
1214 gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1215 break;
1216 case 0x2B:
1217 /* INSLL */
1218 gen_arith3_helper(helper_insll, ra, rb, rc, islit, lit);
1219 break;
1220 case 0x30:
1221 /* ZAP */
1222 gen_arith3_helper(helper_zap, ra, rb, rc, islit, lit);
1223 break;
1224 case 0x31:
1225 /* ZAPNOT */
1226 gen_arith3_helper(helper_zapnot, ra, rb, rc, islit, lit);
1227 break;
1228 case 0x32:
1229 /* MSKQL */
1230 gen_arith3_helper(helper_mskql, ra, rb, rc, islit, lit);
1231 break;
1232 case 0x34:
1233 /* SRL */
1234 if (likely(rc != 31)) {
1235 if (ra != 31) {
1236 if (islit)
1237 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1238 else {
1239 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1240 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1241 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1242 tcg_temp_free(shift);
1244 } else
1245 tcg_gen_movi_i64(cpu_ir[rc], 0);
1247 break;
1248 case 0x36:
1249 /* EXTQL */
1250 gen_ext_l(NULL, ra, rb, rc, islit, lit);
1251 break;
1252 case 0x39:
1253 /* SLL */
1254 if (likely(rc != 31)) {
1255 if (ra != 31) {
1256 if (islit)
1257 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1258 else {
1259 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1260 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1261 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1262 tcg_temp_free(shift);
1264 } else
1265 tcg_gen_movi_i64(cpu_ir[rc], 0);
1267 break;
1268 case 0x3B:
1269 /* INSQL */
1270 gen_arith3_helper(helper_insql, ra, rb, rc, islit, lit);
1271 break;
1272 case 0x3C:
1273 /* SRA */
1274 if (likely(rc != 31)) {
1275 if (ra != 31) {
1276 if (islit)
1277 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1278 else {
1279 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1280 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1281 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1282 tcg_temp_free(shift);
1284 } else
1285 tcg_gen_movi_i64(cpu_ir[rc], 0);
1287 break;
1288 case 0x52:
1289 /* MSKWH */
1290 gen_arith3_helper(helper_mskwh, ra, rb, rc, islit, lit);
1291 break;
1292 case 0x57:
1293 /* INSWH */
1294 gen_arith3_helper(helper_inswh, ra, rb, rc, islit, lit);
1295 break;
1296 case 0x5A:
1297 /* EXTWH */
1298 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1299 break;
1300 case 0x62:
1301 /* MSKLH */
1302 gen_arith3_helper(helper_msklh, ra, rb, rc, islit, lit);
1303 break;
1304 case 0x67:
1305 /* INSLH */
1306 gen_arith3_helper(helper_inslh, ra, rb, rc, islit, lit);
1307 break;
1308 case 0x6A:
1309 /* EXTLH */
1310 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1311 break;
1312 case 0x72:
1313 /* MSKQH */
1314 gen_arith3_helper(helper_mskqh, ra, rb, rc, islit, lit);
1315 break;
1316 case 0x77:
1317 /* INSQH */
1318 gen_arith3_helper(helper_insqh, ra, rb, rc, islit, lit);
1319 break;
1320 case 0x7A:
1321 /* EXTQH */
1322 gen_ext_h(NULL, ra, rb, rc, islit, lit);
1323 break;
1324 default:
1325 goto invalid_opc;
1327 break;
1328 case 0x13:
1329 switch (fn7) {
1330 case 0x00:
1331 /* MULL */
1332 if (likely(rc != 31)) {
1333 if (ra == 31)
1334 tcg_gen_movi_i64(cpu_ir[rc], 0);
1335 else {
1336 if (islit)
1337 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1338 else
1339 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1340 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1343 break;
1344 case 0x20:
1345 /* MULQ */
1346 if (likely(rc != 31)) {
1347 if (ra == 31)
1348 tcg_gen_movi_i64(cpu_ir[rc], 0);
1349 else if (islit)
1350 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1351 else
1352 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1354 break;
1355 case 0x30:
1356 /* UMULH */
1357 gen_arith3_helper(helper_umulh, ra, rb, rc, islit, lit);
1358 break;
1359 case 0x40:
1360 /* MULL/V */
1361 gen_arith3_helper(helper_mullv, ra, rb, rc, islit, lit);
1362 break;
1363 case 0x60:
1364 /* MULQ/V */
1365 gen_arith3_helper(helper_mulqv, ra, rb, rc, islit, lit);
1366 break;
1367 default:
1368 goto invalid_opc;
1370 break;
1371 case 0x14:
1372 switch (fpfn) { /* f11 & 0x3F */
1373 case 0x04:
1374 /* ITOFS */
1375 if (!(ctx->amask & AMASK_FIX))
1376 goto invalid_opc;
1377 gen_itf(ctx, &gen_op_itofs, ra, rc);
1378 break;
1379 case 0x0A:
1380 /* SQRTF */
1381 if (!(ctx->amask & AMASK_FIX))
1382 goto invalid_opc;
1383 gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
1384 break;
1385 case 0x0B:
1386 /* SQRTS */
1387 if (!(ctx->amask & AMASK_FIX))
1388 goto invalid_opc;
1389 gen_farith2(ctx, &gen_op_sqrts, rb, rc);
1390 break;
1391 case 0x14:
1392 /* ITOFF */
1393 if (!(ctx->amask & AMASK_FIX))
1394 goto invalid_opc;
1395 #if 0 // TODO
1396 gen_itf(ctx, &gen_op_itoff, ra, rc);
1397 #else
1398 goto invalid_opc;
1399 #endif
1400 break;
1401 case 0x24:
1402 /* ITOFT */
1403 if (!(ctx->amask & AMASK_FIX))
1404 goto invalid_opc;
1405 gen_itf(ctx, &gen_op_itoft, ra, rc);
1406 break;
1407 case 0x2A:
1408 /* SQRTG */
1409 if (!(ctx->amask & AMASK_FIX))
1410 goto invalid_opc;
1411 gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
1412 break;
1413 case 0x02B:
1414 /* SQRTT */
1415 if (!(ctx->amask & AMASK_FIX))
1416 goto invalid_opc;
1417 gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
1418 break;
1419 default:
1420 goto invalid_opc;
1422 break;
1423 case 0x15:
1424 /* VAX floating point */
1425 /* XXX: rounding mode and trap are ignored (!) */
1426 switch (fpfn) { /* f11 & 0x3F */
1427 case 0x00:
1428 /* ADDF */
1429 gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
1430 break;
1431 case 0x01:
1432 /* SUBF */
1433 gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
1434 break;
1435 case 0x02:
1436 /* MULF */
1437 gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
1438 break;
1439 case 0x03:
1440 /* DIVF */
1441 gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
1442 break;
1443 case 0x1E:
1444 /* CVTDG */
1445 #if 0 // TODO
1446 gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
1447 #else
1448 goto invalid_opc;
1449 #endif
1450 break;
1451 case 0x20:
1452 /* ADDG */
1453 gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
1454 break;
1455 case 0x21:
1456 /* SUBG */
1457 gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
1458 break;
1459 case 0x22:
1460 /* MULG */
1461 gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
1462 break;
1463 case 0x23:
1464 /* DIVG */
1465 gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
1466 break;
1467 case 0x25:
1468 /* CMPGEQ */
1469 gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
1470 break;
1471 case 0x26:
1472 /* CMPGLT */
1473 gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
1474 break;
1475 case 0x27:
1476 /* CMPGLE */
1477 gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
1478 break;
1479 case 0x2C:
1480 /* CVTGF */
1481 gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
1482 break;
1483 case 0x2D:
1484 /* CVTGD */
1485 #if 0 // TODO
1486 gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
1487 #else
1488 goto invalid_opc;
1489 #endif
1490 break;
1491 case 0x2F:
1492 /* CVTGQ */
1493 gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
1494 break;
1495 case 0x3C:
1496 /* CVTQF */
1497 gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
1498 break;
1499 case 0x3E:
1500 /* CVTQG */
1501 gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
1502 break;
1503 default:
1504 goto invalid_opc;
1506 break;
1507 case 0x16:
1508 /* IEEE floating-point */
1509 /* XXX: rounding mode and traps are ignored (!) */
1510 switch (fpfn) { /* f11 & 0x3F */
1511 case 0x00:
1512 /* ADDS */
1513 gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
1514 break;
1515 case 0x01:
1516 /* SUBS */
1517 gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
1518 break;
1519 case 0x02:
1520 /* MULS */
1521 gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
1522 break;
1523 case 0x03:
1524 /* DIVS */
1525 gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
1526 break;
1527 case 0x20:
1528 /* ADDT */
1529 gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
1530 break;
1531 case 0x21:
1532 /* SUBT */
1533 gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
1534 break;
1535 case 0x22:
1536 /* MULT */
1537 gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
1538 break;
1539 case 0x23:
1540 /* DIVT */
1541 gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
1542 break;
1543 case 0x24:
1544 /* CMPTUN */
1545 gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
1546 break;
1547 case 0x25:
1548 /* CMPTEQ */
1549 gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
1550 break;
1551 case 0x26:
1552 /* CMPTLT */
1553 gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
1554 break;
1555 case 0x27:
1556 /* CMPTLE */
1557 gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
1558 break;
1559 case 0x2C:
1560 /* XXX: incorrect */
1561 if (fn11 == 0x2AC) {
1562 /* CVTST */
1563 gen_farith2(ctx, &gen_op_cvtst, rb, rc);
1564 } else {
1565 /* CVTTS */
1566 gen_farith2(ctx, &gen_op_cvtts, rb, rc);
1568 break;
1569 case 0x2F:
1570 /* CVTTQ */
1571 gen_farith2(ctx, &gen_op_cvttq, rb, rc);
1572 break;
1573 case 0x3C:
1574 /* CVTQS */
1575 gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
1576 break;
1577 case 0x3E:
1578 /* CVTQT */
1579 gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
1580 break;
1581 default:
1582 goto invalid_opc;
1584 break;
1585 case 0x17:
1586 switch (fn11) {
1587 case 0x010:
1588 /* CVTLQ */
1589 gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
1590 break;
1591 case 0x020:
1592 /* CPYS */
1593 if (ra == rb) {
1594 if (ra == 31 && rc == 31) {
1595 /* FNOP */
1596 gen_op_nop();
1597 } else {
1598 /* FMOV */
1599 gen_load_fir(ctx, rb, 0);
1600 gen_store_fir(ctx, rc, 0);
1602 } else {
1603 gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
1605 break;
1606 case 0x021:
1607 /* CPYSN */
1608 gen_farith2(ctx, &gen_op_cpysn, rb, rc);
1609 break;
1610 case 0x022:
1611 /* CPYSE */
1612 gen_farith2(ctx, &gen_op_cpyse, rb, rc);
1613 break;
1614 case 0x024:
1615 /* MT_FPCR */
1616 gen_load_fir(ctx, ra, 0);
1617 gen_op_store_fpcr();
1618 break;
1619 case 0x025:
1620 /* MF_FPCR */
1621 gen_op_load_fpcr();
1622 gen_store_fir(ctx, ra, 0);
1623 break;
1624 case 0x02A:
1625 /* FCMOVEQ */
1626 gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
1627 break;
1628 case 0x02B:
1629 /* FCMOVNE */
1630 gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
1631 break;
1632 case 0x02C:
1633 /* FCMOVLT */
1634 gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
1635 break;
1636 case 0x02D:
1637 /* FCMOVGE */
1638 gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
1639 break;
1640 case 0x02E:
1641 /* FCMOVLE */
1642 gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
1643 break;
1644 case 0x02F:
1645 /* FCMOVGT */
1646 gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
1647 break;
1648 case 0x030:
1649 /* CVTQL */
1650 gen_farith2(ctx, &gen_op_cvtql, rb, rc);
1651 break;
1652 case 0x130:
1653 /* CVTQL/V */
1654 gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
1655 break;
1656 case 0x530:
1657 /* CVTQL/SV */
1658 gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
1659 break;
1660 default:
1661 goto invalid_opc;
1663 break;
1664 case 0x18:
1665 switch ((uint16_t)disp16) {
1666 case 0x0000:
1667 /* TRAPB */
1668 /* No-op. Just exit from the current tb */
1669 ret = 2;
1670 break;
1671 case 0x0400:
1672 /* EXCB */
1673 /* No-op. Just exit from the current tb */
1674 ret = 2;
1675 break;
1676 case 0x4000:
1677 /* MB */
1678 /* No-op */
1679 break;
1680 case 0x4400:
1681 /* WMB */
1682 /* No-op */
1683 break;
1684 case 0x8000:
1685 /* FETCH */
1686 /* No-op */
1687 break;
1688 case 0xA000:
1689 /* FETCH_M */
1690 /* No-op */
1691 break;
1692 case 0xC000:
1693 /* RPCC */
1694 if (ra != 31)
1695 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
1696 break;
1697 case 0xE000:
1698 /* RC */
1699 if (ra != 31)
1700 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1701 break;
1702 case 0xE800:
1703 /* ECB */
1704 /* XXX: TODO: evict tb cache at address rb */
1705 #if 0
1706 ret = 2;
1707 #else
1708 goto invalid_opc;
1709 #endif
1710 break;
1711 case 0xF000:
1712 /* RS */
1713 if (ra != 31)
1714 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1715 break;
1716 case 0xF800:
1717 /* WH64 */
1718 /* No-op */
1719 break;
1720 default:
1721 goto invalid_opc;
1723 break;
1724 case 0x19:
1725 /* HW_MFPR (PALcode) */
1726 #if defined (CONFIG_USER_ONLY)
1727 goto invalid_opc;
1728 #else
1729 if (!ctx->pal_mode)
1730 goto invalid_opc;
1731 gen_op_mfpr(insn & 0xFF);
1732 if (ra != 31)
1733 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1734 break;
1735 #endif
1736 case 0x1A:
1737 if (ra != 31)
1738 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1739 if (rb != 31)
1740 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1741 else
1742 tcg_gen_movi_i64(cpu_pc, 0);
1743 /* Those four jumps only differ by the branch prediction hint */
1744 switch (fn2) {
1745 case 0x0:
1746 /* JMP */
1747 break;
1748 case 0x1:
1749 /* JSR */
1750 break;
1751 case 0x2:
1752 /* RET */
1753 break;
1754 case 0x3:
1755 /* JSR_COROUTINE */
1756 break;
1758 ret = 1;
1759 break;
1760 case 0x1B:
1761 /* HW_LD (PALcode) */
1762 #if defined (CONFIG_USER_ONLY)
1763 goto invalid_opc;
1764 #else
1765 if (!ctx->pal_mode)
1766 goto invalid_opc;
1767 if (rb != 31)
1768 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1769 else
1770 tcg_gen_movi_i64(cpu_T[0], 0);
1771 tcg_gen_movi_i64(cpu_T[1], disp12);
1772 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1773 switch ((insn >> 12) & 0xF) {
1774 case 0x0:
1775 /* Longword physical access */
1776 gen_op_ldl_raw();
1777 break;
1778 case 0x1:
1779 /* Quadword physical access */
1780 gen_op_ldq_raw();
1781 break;
1782 case 0x2:
1783 /* Longword physical access with lock */
1784 gen_op_ldl_l_raw();
1785 break;
1786 case 0x3:
1787 /* Quadword physical access with lock */
1788 gen_op_ldq_l_raw();
1789 break;
1790 case 0x4:
1791 /* Longword virtual PTE fetch */
1792 gen_op_ldl_kernel();
1793 break;
1794 case 0x5:
1795 /* Quadword virtual PTE fetch */
1796 gen_op_ldq_kernel();
1797 break;
1798 case 0x6:
1799 /* Invalid */
1800 goto invalid_opc;
1801 case 0x7:
1802 /* Invalid */
1803 goto invalid_opc;
1804 case 0x8:
1805 /* Longword virtual access */
1806 gen_op_ld_phys_to_virt();
1807 gen_op_ldl_raw();
1808 break;
1809 case 0x9:
1810 /* Quadword virtual access */
1811 gen_op_ld_phys_to_virt();
1812 gen_op_ldq_raw();
1813 break;
1814 case 0xA:
1815 /* Longword virtual access with protection check */
1816 gen_ldl(ctx);
1817 break;
1818 case 0xB:
1819 /* Quadword virtual access with protection check */
1820 gen_ldq(ctx);
1821 break;
1822 case 0xC:
1823 /* Longword virtual access with alternate access mode */
1824 gen_op_set_alt_mode();
1825 gen_op_ld_phys_to_virt();
1826 gen_op_ldl_raw();
1827 gen_op_restore_mode();
1828 break;
1829 case 0xD:
1830 /* Quadword virtual access with alternate access mode */
1831 gen_op_set_alt_mode();
1832 gen_op_ld_phys_to_virt();
1833 gen_op_ldq_raw();
1834 gen_op_restore_mode();
1835 break;
1836 case 0xE:
1837 /* Longword virtual access with alternate access mode and
1838 * protection checks
1840 gen_op_set_alt_mode();
1841 gen_op_ldl_data();
1842 gen_op_restore_mode();
1843 break;
1844 case 0xF:
1845 /* Quadword virtual access with alternate access mode and
1846 * protection checks
1848 gen_op_set_alt_mode();
1849 gen_op_ldq_data();
1850 gen_op_restore_mode();
1851 break;
1853 if (ra != 31)
1854 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
1855 break;
1856 #endif
1857 case 0x1C:
1858 switch (fn7) {
1859 case 0x00:
1860 /* SEXTB */
1861 if (!(ctx->amask & AMASK_BWX))
1862 goto invalid_opc;
1863 if (likely(rc != 31)) {
1864 if (islit)
1865 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1866 else
1867 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1869 break;
1870 case 0x01:
1871 /* SEXTW */
1872 if (!(ctx->amask & AMASK_BWX))
1873 goto invalid_opc;
1874 if (likely(rc != 31)) {
1875 if (islit)
1876 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1877 else
1878 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1880 break;
1881 case 0x30:
1882 /* CTPOP */
1883 if (!(ctx->amask & AMASK_CIX))
1884 goto invalid_opc;
1885 if (likely(rc != 31)) {
1886 if (islit)
1887 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1888 else
1889 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1891 break;
1892 case 0x31:
1893 /* PERR */
1894 if (!(ctx->amask & AMASK_MVI))
1895 goto invalid_opc;
1896 /* XXX: TODO */
1897 goto invalid_opc;
1898 break;
1899 case 0x32:
1900 /* CTLZ */
1901 if (!(ctx->amask & AMASK_CIX))
1902 goto invalid_opc;
1903 if (likely(rc != 31)) {
1904 if (islit)
1905 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1906 else
1907 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1909 break;
1910 case 0x33:
1911 /* CTTZ */
1912 if (!(ctx->amask & AMASK_CIX))
1913 goto invalid_opc;
1914 if (likely(rc != 31)) {
1915 if (islit)
1916 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1917 else
1918 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1920 break;
1921 case 0x34:
1922 /* UNPKBW */
1923 if (!(ctx->amask & AMASK_MVI))
1924 goto invalid_opc;
1925 /* XXX: TODO */
1926 goto invalid_opc;
1927 break;
1928 case 0x35:
1929 /* UNPKWL */
1930 if (!(ctx->amask & AMASK_MVI))
1931 goto invalid_opc;
1932 /* XXX: TODO */
1933 goto invalid_opc;
1934 break;
1935 case 0x36:
1936 /* PKWB */
1937 if (!(ctx->amask & AMASK_MVI))
1938 goto invalid_opc;
1939 /* XXX: TODO */
1940 goto invalid_opc;
1941 break;
1942 case 0x37:
1943 /* PKLB */
1944 if (!(ctx->amask & AMASK_MVI))
1945 goto invalid_opc;
1946 /* XXX: TODO */
1947 goto invalid_opc;
1948 break;
1949 case 0x38:
1950 /* MINSB8 */
1951 if (!(ctx->amask & AMASK_MVI))
1952 goto invalid_opc;
1953 /* XXX: TODO */
1954 goto invalid_opc;
1955 break;
1956 case 0x39:
1957 /* MINSW4 */
1958 if (!(ctx->amask & AMASK_MVI))
1959 goto invalid_opc;
1960 /* XXX: TODO */
1961 goto invalid_opc;
1962 break;
1963 case 0x3A:
1964 /* MINUB8 */
1965 if (!(ctx->amask & AMASK_MVI))
1966 goto invalid_opc;
1967 /* XXX: TODO */
1968 goto invalid_opc;
1969 break;
1970 case 0x3B:
1971 /* MINUW4 */
1972 if (!(ctx->amask & AMASK_MVI))
1973 goto invalid_opc;
1974 /* XXX: TODO */
1975 goto invalid_opc;
1976 break;
1977 case 0x3C:
1978 /* MAXUB8 */
1979 if (!(ctx->amask & AMASK_MVI))
1980 goto invalid_opc;
1981 /* XXX: TODO */
1982 goto invalid_opc;
1983 break;
1984 case 0x3D:
1985 /* MAXUW4 */
1986 if (!(ctx->amask & AMASK_MVI))
1987 goto invalid_opc;
1988 /* XXX: TODO */
1989 goto invalid_opc;
1990 break;
1991 case 0x3E:
1992 /* MAXSB8 */
1993 if (!(ctx->amask & AMASK_MVI))
1994 goto invalid_opc;
1995 /* XXX: TODO */
1996 goto invalid_opc;
1997 break;
1998 case 0x3F:
1999 /* MAXSW4 */
2000 if (!(ctx->amask & AMASK_MVI))
2001 goto invalid_opc;
2002 /* XXX: TODO */
2003 goto invalid_opc;
2004 break;
2005 case 0x70:
2006 /* FTOIT */
2007 if (!(ctx->amask & AMASK_FIX))
2008 goto invalid_opc;
2009 gen_fti(ctx, &gen_op_ftoit, ra, rb);
2010 break;
2011 case 0x78:
2012 /* FTOIS */
2013 if (!(ctx->amask & AMASK_FIX))
2014 goto invalid_opc;
2015 gen_fti(ctx, &gen_op_ftois, ra, rb);
2016 break;
2017 default:
2018 goto invalid_opc;
2020 break;
2021 case 0x1D:
2022 /* HW_MTPR (PALcode) */
2023 #if defined (CONFIG_USER_ONLY)
2024 goto invalid_opc;
2025 #else
2026 if (!ctx->pal_mode)
2027 goto invalid_opc;
2028 if (ra != 31)
2029 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
2030 else
2031 tcg_gen_movi_i64(cpu_T[0], 0);
2032 gen_op_mtpr(insn & 0xFF);
2033 ret = 2;
2034 break;
2035 #endif
2036 case 0x1E:
2037 /* HW_REI (PALcode) */
2038 #if defined (CONFIG_USER_ONLY)
2039 goto invalid_opc;
2040 #else
2041 if (!ctx->pal_mode)
2042 goto invalid_opc;
2043 if (rb == 31) {
2044 /* "Old" alpha */
2045 gen_op_hw_rei();
2046 } else {
2047 if (ra != 31)
2048 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
2049 else
2050 tcg_gen_movi_i64(cpu_T[0], 0);
2051 tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
2052 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2053 gen_op_hw_ret();
2055 ret = 2;
2056 break;
2057 #endif
2058 case 0x1F:
2059 /* HW_ST (PALcode) */
2060 #if defined (CONFIG_USER_ONLY)
2061 goto invalid_opc;
2062 #else
2063 if (!ctx->pal_mode)
2064 goto invalid_opc;
2065 if (ra != 31)
2066 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
2067 else
2068 tcg_gen_movi_i64(cpu_T[0], disp12);
2069 if (ra != 31)
2070 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
2071 else
2072 tcg_gen_movi_i64(cpu_T[1], 0);
2073 switch ((insn >> 12) & 0xF) {
2074 case 0x0:
2075 /* Longword physical access */
2076 gen_op_stl_raw();
2077 break;
2078 case 0x1:
2079 /* Quadword physical access */
2080 gen_op_stq_raw();
2081 break;
2082 case 0x2:
2083 /* Longword physical access with lock */
2084 gen_op_stl_c_raw();
2085 break;
2086 case 0x3:
2087 /* Quadword physical access with lock */
2088 gen_op_stq_c_raw();
2089 break;
2090 case 0x4:
2091 /* Longword virtual access */
2092 gen_op_st_phys_to_virt();
2093 gen_op_stl_raw();
2094 break;
2095 case 0x5:
2096 /* Quadword virtual access */
2097 gen_op_st_phys_to_virt();
2098 gen_op_stq_raw();
2099 break;
2100 case 0x6:
2101 /* Invalid */
2102 goto invalid_opc;
2103 case 0x7:
2104 /* Invalid */
2105 goto invalid_opc;
2106 case 0x8:
2107 /* Invalid */
2108 goto invalid_opc;
2109 case 0x9:
2110 /* Invalid */
2111 goto invalid_opc;
2112 case 0xA:
2113 /* Invalid */
2114 goto invalid_opc;
2115 case 0xB:
2116 /* Invalid */
2117 goto invalid_opc;
2118 case 0xC:
2119 /* Longword virtual access with alternate access mode.
 * NOTE(review): this HW_ST path calls gen_op_ldl_raw() below --
 * looks like a copy/paste slip; gen_op_stl_raw() seems intended. Verify. */
2120 gen_op_set_alt_mode();
2121 gen_op_st_phys_to_virt();
2122 gen_op_ldl_raw();
2123 gen_op_restore_mode();
2124 break;
2125 case 0xD:
2126 /* Quadword virtual access with alternate access mode.
 * NOTE(review): this HW_ST path calls gen_op_ldq_raw() below --
 * looks like a copy/paste slip; gen_op_stq_raw() seems intended. Verify. */
2127 gen_op_set_alt_mode();
2128 gen_op_st_phys_to_virt();
2129 gen_op_ldq_raw();
2130 gen_op_restore_mode();
2131 break;
2132 case 0xE:
2133 /* Invalid */
2134 goto invalid_opc;
2135 case 0xF:
2136 /* Invalid */
2137 goto invalid_opc;
2139 ret = 2;
2140 break;
2141 #endif
2142 case 0x20:
2143 /* LDF */
2144 #if 0 // TODO
2145 gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
2146 #else
2147 goto invalid_opc;
2148 #endif
2149 break;
2150 case 0x21:
2151 /* LDG */
2152 #if 0 // TODO
2153 gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
2154 #else
2155 goto invalid_opc;
2156 #endif
2157 break;
2158 case 0x22:
2159 /* LDS */
2160 gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
2161 break;
2162 case 0x23:
2163 /* LDT */
2164 gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
2165 break;
2166 case 0x24:
2167 /* STF */
2168 #if 0 // TODO
2169 gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
2170 #else
2171 goto invalid_opc;
2172 #endif
2173 break;
2174 case 0x25:
2175 /* STG */
2176 #if 0 // TODO
2177 gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
2178 #else
2179 goto invalid_opc;
2180 #endif
2181 break;
2182 case 0x26:
2183 /* STS */
2184 gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
2185 break;
2186 case 0x27:
2187 /* STT */
2188 gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
2189 break;
2190 case 0x28:
2191 /* LDL */
2192 if (likely(ra != 31)) {
2193 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
2194 if (rb != 31)
2195 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
2196 else
2197 tcg_gen_movi_i64(addr, disp16);
2198 tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->mem_idx);
2199 tcg_temp_free(addr);
2201 break;
2202 case 0x29:
2203 /* LDQ */
2204 if (likely(ra != 31)) {
2205 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
2206 if (rb != 31)
2207 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
2208 else
2209 tcg_gen_movi_i64(addr, disp16);
2210 tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
2211 tcg_temp_free(addr);
2213 break;
2214 case 0x2A:
2215 /* LDL_L */
2216 gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);
2217 break;
2218 case 0x2B:
2219 /* LDQ_L */
2220 gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);
2221 break;
2222 case 0x2C:
2223 /* STL */
2225 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
2226 if (rb != 31)
2227 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
2228 else
2229 tcg_gen_movi_i64(addr, disp16);
2230 if (ra != 31)
2231 tcg_gen_qemu_st32(cpu_ir[ra], addr, ctx->mem_idx);
2232 else {
2233 TCGv zero = tcg_const_i64(0);
2234 tcg_gen_qemu_st32(zero, addr, ctx->mem_idx);
2235 tcg_temp_free(zero);
2237 tcg_temp_free(addr);
2239 break;
2240 case 0x2D:
2241 /* STQ */
2243 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
2244 if (rb != 31)
2245 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
2246 else
2247 tcg_gen_movi_i64(addr, disp16);
2248 if (ra != 31)
2249 tcg_gen_qemu_st64(cpu_ir[ra], addr, ctx->mem_idx);
2250 else {
2251 TCGv zero = tcg_const_i64(0);
2252 tcg_gen_qemu_st64(zero, addr, ctx->mem_idx);
2253 tcg_temp_free(zero);
2255 tcg_temp_free(addr);
2257 break;
2258 case 0x2E:
2259 /* STL_C */
2260 gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);
2261 break;
2262 case 0x2F:
2263 /* STQ_C */
2264 gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
2265 break;
2266 case 0x30:
2267 /* BR */
2268 if (ra != 31)
2269 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2270 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2271 ret = 1;
2272 break;
2273 case 0x31:
2274 /* FBEQ */
2275 gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
2276 ret = 1;
2277 break;
2278 case 0x32:
2279 /* FBLT */
2280 gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
2281 ret = 1;
2282 break;
2283 case 0x33:
2284 /* FBLE */
2285 gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
2286 ret = 1;
2287 break;
2288 case 0x34:
2289 /* BSR */
2290 if (ra != 31)
2291 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2292 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2293 ret = 1;
2294 break;
2295 case 0x35:
2296 /* FBNE */
2297 gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
2298 ret = 1;
2299 break;
2300 case 0x36:
2301 /* FBGE */
2302 gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
2303 ret = 1;
2304 break;
2305 case 0x37:
2306 /* FBGT */
2307 gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
2308 ret = 1;
2309 break;
2310 case 0x38:
2311 /* BLBC */
2312 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
2313 ret = 1;
2314 break;
2315 case 0x39:
2316 /* BEQ */
2317 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
2318 ret = 1;
2319 break;
2320 case 0x3A:
2321 /* BLT */
2322 gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
2323 ret = 1;
2324 break;
2325 case 0x3B:
2326 /* BLE */
2327 gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
2328 ret = 1;
2329 break;
2330 case 0x3C:
2331 /* BLBS */
2332 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
2333 ret = 1;
2334 break;
2335 case 0x3D:
2336 /* BNE */
2337 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
2338 ret = 1;
2339 break;
2340 case 0x3E:
2341 /* BGE */
2342 gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
2343 ret = 1;
2344 break;
2345 case 0x3F:
2346 /* BGT */
2347 gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
2348 ret = 1;
2349 break;
2350 invalid_opc:
2351 gen_invalid(ctx);
2352 ret = 3;
2353 break;
2356 return ret;
2359 static always_inline void gen_intermediate_code_internal (CPUState *env,
2360 TranslationBlock *tb,
2361 int search_pc)
2363 #if defined ALPHA_DEBUG_DISAS
2364 static int insn_count;
2365 #endif
2366 DisasContext ctx, *ctxp = &ctx;
2367 target_ulong pc_start;
2368 uint32_t insn;
2369 uint16_t *gen_opc_end;
2370 int j, lj = -1;
2371 int ret;
2372 int num_insns;
2373 int max_insns;
2375 pc_start = tb->pc;
2376 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2377 ctx.pc = pc_start;
2378 ctx.amask = env->amask;
2379 #if defined (CONFIG_USER_ONLY)
2380 ctx.mem_idx = 0;
2381 #else
2382 ctx.mem_idx = ((env->ps >> 3) & 3);
2383 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
2384 #endif
2385 num_insns = 0;
2386 max_insns = tb->cflags & CF_COUNT_MASK;
2387 if (max_insns == 0)
2388 max_insns = CF_COUNT_MASK;
2390 gen_icount_start();
2391 for (ret = 0; ret == 0;) {
2392 if (env->nb_breakpoints > 0) {
2393 for(j = 0; j < env->nb_breakpoints; j++) {
2394 if (env->breakpoints[j] == ctx.pc) {
2395 gen_excp(&ctx, EXCP_DEBUG, 0);
2396 break;
2400 if (search_pc) {
2401 j = gen_opc_ptr - gen_opc_buf;
2402 if (lj < j) {
2403 lj++;
2404 while (lj < j)
2405 gen_opc_instr_start[lj++] = 0;
2406 gen_opc_pc[lj] = ctx.pc;
2407 gen_opc_instr_start[lj] = 1;
2408 gen_opc_icount[lj] = num_insns;
2411 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2412 gen_io_start();
2413 #if defined ALPHA_DEBUG_DISAS
2414 insn_count++;
2415 if (logfile != NULL) {
2416 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2417 ctx.pc, ctx.mem_idx);
2419 #endif
2420 insn = ldl_code(ctx.pc);
2421 #if defined ALPHA_DEBUG_DISAS
2422 insn_count++;
2423 if (logfile != NULL) {
2424 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2426 #endif
2427 num_insns++;
2428 ctx.pc += 4;
2429 ret = translate_one(ctxp, insn);
2430 if (ret != 0)
2431 break;
2432 /* if we reach a page boundary or are single stepping, stop
2433 * generation
2435 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2436 (env->singlestep_enabled) ||
2437 num_insns >= max_insns) {
2438 break;
2440 #if defined (DO_SINGLE_STEP)
2441 break;
2442 #endif
2444 if (ret != 1 && ret != 3) {
2445 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2447 #if defined (DO_TB_FLUSH)
2448 tcg_gen_helper_0_0(helper_tb_flush);
2449 #endif
2450 if (tb->cflags & CF_LAST_IO)
2451 gen_io_end();
2452 /* Generate the return instruction */
2453 tcg_gen_exit_tb(0);
2454 gen_icount_end(tb, num_insns);
2455 *gen_opc_ptr = INDEX_op_end;
2456 if (search_pc) {
2457 j = gen_opc_ptr - gen_opc_buf;
2458 lj++;
2459 while (lj <= j)
2460 gen_opc_instr_start[lj++] = 0;
2461 } else {
2462 tb->size = ctx.pc - pc_start;
2463 tb->icount = num_insns;
2465 #if defined ALPHA_DEBUG_DISAS
2466 if (loglevel & CPU_LOG_TB_CPU) {
2467 cpu_dump_state(env, logfile, fprintf, 0);
2469 if (loglevel & CPU_LOG_TB_IN_ASM) {
2470 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2471 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2472 fprintf(logfile, "\n");
2474 #endif
/* Public entry point: translate the TB normally (search_pc = 0, so no
 * gen_opc_* PC side tables are recorded).  */
2477 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2479 gen_intermediate_code_internal(env, tb, 0);
/* Retranslation entry point: same as gen_intermediate_code but with
 * search_pc = 1, so gen_opc_pc / gen_opc_instr_start / gen_opc_icount are
 * filled in for later host-PC -> guest-PC lookup (see gen_pc_load).  */
2482 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2484 gen_intermediate_code_internal(env, tb, 1);
/* Allocate and initialize a CPUAlphaState.
 * Returns NULL if allocation fails.
 * NOTE(review): the cpu_model parameter is never used here -- the
 * implementation version is hardcoded to IMPLVER_2106x below.  */
2487 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2489 CPUAlphaState *env;
2490 uint64_t hwpcb;
2492 env = qemu_mallocz(sizeof(CPUAlphaState));
2493 if (!env)
2494 return NULL;
2495 cpu_exec_init(env);
2496 alpha_translate_init();
2497 tlb_flush(env, 1);
/* XXX: should not be hardcoded -- should come from cpu_model.  */
2498 /* XXX: should not be hardcoded */
2499 env->implver = IMPLVER_2106x;
2500 env->ps = 0x1F00;
2501 #if defined (CONFIG_USER_ONLY)
/* User mode: force user-mode bit (PS<3>) so mem_idx selects user access.  */
2502 env->ps |= 1 << 3;
2503 #endif
2504 pal_init(env);
2505 /* Initialize IPR */
/* hwpcb is read here but only consumed by the commented-out ldq_raw()
 * loads below, so it is currently unused.  */
2506 hwpcb = env->ipr[IPR_PCBB];
2507 env->ipr[IPR_ASN] = 0;
2508 env->ipr[IPR_ASTEN] = 0;
2509 env->ipr[IPR_ASTSR] = 0;
2510 env->ipr[IPR_DATFX] = 0;
2511 /* XXX: fix this */
2512 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2513 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2514 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2515 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2516 env->ipr[IPR_FEN] = 0;
2517 env->ipr[IPR_IPL] = 31;
2518 env->ipr[IPR_MCES] = 0;
2519 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2520 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2521 env->ipr[IPR_SISR] = 0;
2522 env->ipr[IPR_VIRBND] = -1ULL;
2524 return env;
/* Restore the guest PC after an exception inside a TB: pc_pos indexes the
 * gen_opc_pc table recorded by gen_intermediate_code_pc (search_pc = 1).
 * tb, searched_pc and puc are unused in this target.  */
2527 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2528 unsigned long searched_pc, int pc_pos, void *puc)
2530 env->pc = gen_opc_pc[pc_pos];