Fix interrupt exclusion via SSTEP_NOIRQ
[qemu/mini2440.git] / target-alpha / translate.c
blob5c88a12a999776b520b08311b98e11ac289b4cdb
1 /*
2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include <stdint.h>
22 #include <stdlib.h>
23 #include <stdio.h>
25 #include "cpu.h"
26 #include "exec-all.h"
27 #include "disas.h"
28 #include "host-utils.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
33 #define DO_SINGLE_STEP
34 #define ALPHA_DEBUG_DISAS
35 #define DO_TB_FLUSH
typedef struct DisasContext DisasContext;
/* Per-translation-block decoder state carried through translate_one(). */
struct DisasContext {
    uint64_t pc;        /* virtual PC of the insn currently being translated */
    int mem_idx;        /* MMU index used for guest memory accesses */
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;       /* non-zero while executing PALcode */
#endif
    uint32_t amask;     /* architecture extension mask (AMASK_* bits) */
};
47 /* global register indexes */
48 static TCGv cpu_env;
49 static TCGv cpu_ir[31];
50 static TCGv cpu_fir[31];
51 static TCGv cpu_pc;
52 static TCGv cpu_lock;
54 /* register names */
55 static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
57 #include "gen-icount.h"
59 static void alpha_translate_init(void)
61 int i;
62 char *p;
63 static int done_init = 0;
65 if (done_init)
66 return;
68 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
70 p = cpu_reg_names;
71 for (i = 0; i < 31; i++) {
72 sprintf(p, "ir%d", i);
73 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, ir[i]), p);
75 p += (i < 10) ? 4 : 5;
77 sprintf(p, "fir%d", i);
78 cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
79 offsetof(CPUState, fir[i]), p);
80 p += (i < 10) ? 5 : 6;
83 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
84 offsetof(CPUState, pc), "pc");
86 cpu_lock = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
87 offsetof(CPUState, lock), "lock");
89 /* register helpers */
90 #undef DEF_HELPER
91 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
92 #include "helper.h"
94 done_init = 1;
97 static always_inline void gen_excp (DisasContext *ctx,
98 int exception, int error_code)
100 TCGv tmp1, tmp2;
102 tcg_gen_movi_i64(cpu_pc, ctx->pc);
103 tmp1 = tcg_const_i32(exception);
104 tmp2 = tcg_const_i32(error_code);
105 tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
106 tcg_temp_free(tmp2);
107 tcg_temp_free(tmp1);
110 static always_inline void gen_invalid (DisasContext *ctx)
112 gen_excp(ctx, EXCP_OPCDEC, 0);
115 static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
117 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
118 tcg_gen_qemu_ld32u(tmp, t1, flags);
119 tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
120 tcg_temp_free(tmp);
123 static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
125 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
126 tcg_gen_qemu_ld64(tmp, t1, flags);
127 tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
128 tcg_temp_free(tmp);
131 static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
133 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
134 tcg_gen_qemu_ld32u(tmp, t1, flags);
135 tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
136 tcg_temp_free(tmp);
139 static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
141 tcg_gen_mov_i64(cpu_lock, t1);
142 tcg_gen_qemu_ld32s(t0, t1, flags);
145 static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
147 tcg_gen_mov_i64(cpu_lock, t1);
148 tcg_gen_qemu_ld64(t0, t1, flags);
151 static always_inline void gen_load_mem (DisasContext *ctx,
152 void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
153 int ra, int rb, int32_t disp16,
154 int fp, int clear)
156 TCGv addr;
158 if (unlikely(ra == 31))
159 return;
161 addr = tcg_temp_new(TCG_TYPE_I64);
162 if (rb != 31) {
163 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
164 if (clear)
165 tcg_gen_andi_i64(addr, addr, ~0x7);
166 } else {
167 if (clear)
168 disp16 &= ~0x7;
169 tcg_gen_movi_i64(addr, disp16);
171 if (fp)
172 tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
173 else
174 tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
175 tcg_temp_free(addr);
178 static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
180 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
181 tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
182 tcg_gen_qemu_st32(tmp, t1, flags);
183 tcg_temp_free(tmp);
186 static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
188 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
189 tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
190 tcg_gen_qemu_st64(tmp, t1, flags);
191 tcg_temp_free(tmp);
194 static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
196 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
197 tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
198 tcg_gen_qemu_st32(tmp, t1, flags);
199 tcg_temp_free(tmp);
202 static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
204 int l1, l2;
206 l1 = gen_new_label();
207 l2 = gen_new_label();
208 tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
209 tcg_gen_qemu_st32(t0, t1, flags);
210 tcg_gen_movi_i64(t0, 0);
211 tcg_gen_br(l2);
212 gen_set_label(l1);
213 tcg_gen_movi_i64(t0, 1);
214 gen_set_label(l2);
215 tcg_gen_movi_i64(cpu_lock, -1);
218 static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
220 int l1, l2;
222 l1 = gen_new_label();
223 l2 = gen_new_label();
224 tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
225 tcg_gen_qemu_st64(t0, t1, flags);
226 tcg_gen_movi_i64(t0, 0);
227 tcg_gen_br(l2);
228 gen_set_label(l1);
229 tcg_gen_movi_i64(t0, 1);
230 gen_set_label(l2);
231 tcg_gen_movi_i64(cpu_lock, -1);
234 static always_inline void gen_store_mem (DisasContext *ctx,
235 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
236 int ra, int rb, int32_t disp16,
237 int fp, int clear)
239 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
240 if (rb != 31) {
241 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
242 if (clear)
243 tcg_gen_andi_i64(addr, addr, ~0x7);
244 } else {
245 if (clear)
246 disp16 &= ~0x7;
247 tcg_gen_movi_i64(addr, disp16);
249 if (ra != 31) {
250 if (fp)
251 tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
252 else
253 tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
254 } else {
255 TCGv zero = tcg_const_i64(0);
256 tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
257 tcg_temp_free(zero);
259 tcg_temp_free(addr);
262 static always_inline void gen_bcond (DisasContext *ctx,
263 TCGCond cond,
264 int ra, int32_t disp16, int mask)
266 int l1, l2;
268 l1 = gen_new_label();
269 l2 = gen_new_label();
270 if (likely(ra != 31)) {
271 if (mask) {
272 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
273 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
274 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
275 tcg_temp_free(tmp);
276 } else
277 tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
278 } else {
279 /* Very uncommon case - Do not bother to optimize. */
280 TCGv tmp = tcg_const_i64(0);
281 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
282 tcg_temp_free(tmp);
284 tcg_gen_movi_i64(cpu_pc, ctx->pc);
285 tcg_gen_br(l2);
286 gen_set_label(l1);
287 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
288 gen_set_label(l2);
291 static always_inline void gen_fbcond (DisasContext *ctx,
292 void* func,
293 int ra, int32_t disp16)
295 int l1, l2;
296 TCGv tmp;
298 l1 = gen_new_label();
299 l2 = gen_new_label();
300 if (ra != 31) {
301 tmp = tcg_temp_new(TCG_TYPE_I64);
302 tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
303 } else {
304 tmp = tcg_const_i64(0);
305 tcg_gen_helper_1_1(func, tmp, tmp);
307 tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
308 tcg_gen_movi_i64(cpu_pc, ctx->pc);
309 tcg_gen_br(l2);
310 gen_set_label(l1);
311 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
312 gen_set_label(l2);
315 static always_inline void gen_cmov (TCGCond inv_cond,
316 int ra, int rb, int rc,
317 int islit, uint8_t lit, int mask)
319 int l1;
321 if (unlikely(rc == 31))
322 return;
324 l1 = gen_new_label();
326 if (ra != 31) {
327 if (mask) {
328 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
329 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
330 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
331 tcg_temp_free(tmp);
332 } else
333 tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
334 } else {
335 /* Very uncommon case - Do not bother to optimize. */
336 TCGv tmp = tcg_const_i64(0);
337 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
338 tcg_temp_free(tmp);
341 if (islit)
342 tcg_gen_movi_i64(cpu_ir[rc], lit);
343 else
344 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
345 gen_set_label(l1);
348 static always_inline void gen_farith2 (void *helper,
349 int rb, int rc)
351 if (unlikely(rc == 31))
352 return;
354 if (rb != 31)
355 tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
356 else {
357 TCGv tmp = tcg_const_i64(0);
358 tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
359 tcg_temp_free(tmp);
363 static always_inline void gen_farith3 (void *helper,
364 int ra, int rb, int rc)
366 if (unlikely(rc == 31))
367 return;
369 if (ra != 31) {
370 if (rb != 31)
371 tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
372 else {
373 TCGv tmp = tcg_const_i64(0);
374 tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
375 tcg_temp_free(tmp);
377 } else {
378 TCGv tmp = tcg_const_i64(0);
379 if (rb != 31)
380 tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
381 else
382 tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
383 tcg_temp_free(tmp);
387 static always_inline void gen_fcmov (void *func,
388 int ra, int rb, int rc)
390 int l1;
391 TCGv tmp;
393 if (unlikely(rc == 31))
394 return;
396 l1 = gen_new_label();
397 tmp = tcg_temp_new(TCG_TYPE_I64);
398 if (ra != 31) {
399 tmp = tcg_temp_new(TCG_TYPE_I64);
400 tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
401 } else {
402 tmp = tcg_const_i64(0);
403 tcg_gen_helper_1_1(func, tmp, tmp);
405 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
406 if (rb != 31)
407 tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
408 else
409 tcg_gen_movi_i64(cpu_fir[rc], 0);
410 gen_set_label(l1);
413 /* EXTWH, EXTWH, EXTLH, EXTQH */
414 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
415 int ra, int rb, int rc,
416 int islit, uint8_t lit)
418 if (unlikely(rc == 31))
419 return;
421 if (ra != 31) {
422 if (islit) {
423 if (lit != 0)
424 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
425 else
426 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
427 } else {
428 TCGv tmp1, tmp2;
429 tmp1 = tcg_temp_new(TCG_TYPE_I64);
430 tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
431 tcg_gen_shli_i64(tmp1, tmp1, 3);
432 tmp2 = tcg_const_i64(64);
433 tcg_gen_sub_i64(tmp1, tmp2, tmp1);
434 tcg_temp_free(tmp2);
435 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
436 tcg_temp_free(tmp1);
438 if (tcg_gen_ext_i64)
439 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
440 } else
441 tcg_gen_movi_i64(cpu_ir[rc], 0);
444 /* EXTBL, EXTWL, EXTWL, EXTLL, EXTQL */
445 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
446 int ra, int rb, int rc,
447 int islit, uint8_t lit)
449 if (unlikely(rc == 31))
450 return;
452 if (ra != 31) {
453 if (islit) {
454 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
455 } else {
456 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
457 tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
458 tcg_gen_shli_i64(tmp, tmp, 3);
459 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
460 tcg_temp_free(tmp);
462 if (tcg_gen_ext_i64)
463 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
464 } else
465 tcg_gen_movi_i64(cpu_ir[rc], 0);
468 /* Code to call arith3 helpers */
469 static always_inline void gen_arith3 (void *helper,
470 int ra, int rb, int rc,
471 int islit, uint8_t lit)
473 if (unlikely(rc == 31))
474 return;
476 if (ra != 31) {
477 if (islit) {
478 TCGv tmp = tcg_const_i64(lit);
479 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
480 tcg_temp_free(tmp);
481 } else
482 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
483 } else {
484 TCGv tmp1 = tcg_const_i64(0);
485 if (islit) {
486 TCGv tmp2 = tcg_const_i64(lit);
487 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
488 tcg_temp_free(tmp2);
489 } else
490 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
491 tcg_temp_free(tmp1);
495 static always_inline void gen_cmp(TCGCond cond,
496 int ra, int rb, int rc,
497 int islit, uint8_t lit)
499 int l1, l2;
500 TCGv tmp;
502 if (unlikely(rc == 31))
503 return;
505 l1 = gen_new_label();
506 l2 = gen_new_label();
508 if (ra != 31) {
509 tmp = tcg_temp_new(TCG_TYPE_I64);
510 tcg_gen_mov_i64(tmp, cpu_ir[ra]);
511 } else
512 tmp = tcg_const_i64(0);
513 if (islit)
514 tcg_gen_brcondi_i64(cond, tmp, lit, l1);
515 else
516 tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
518 tcg_gen_movi_i64(cpu_ir[rc], 0);
519 tcg_gen_br(l2);
520 gen_set_label(l1);
521 tcg_gen_movi_i64(cpu_ir[rc], 1);
522 gen_set_label(l2);
525 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
527 uint32_t palcode;
528 int32_t disp21, disp16, disp12;
529 uint16_t fn11, fn16;
530 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
531 uint8_t lit;
532 int ret;
534 /* Decode all instruction fields */
535 opc = insn >> 26;
536 ra = (insn >> 21) & 0x1F;
537 rb = (insn >> 16) & 0x1F;
538 rc = insn & 0x1F;
539 sbz = (insn >> 13) & 0x07;
540 islit = (insn >> 12) & 1;
541 if (rb == 31 && !islit) {
542 islit = 1;
543 lit = 0;
544 } else
545 lit = (insn >> 13) & 0xFF;
546 palcode = insn & 0x03FFFFFF;
547 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
548 disp16 = (int16_t)(insn & 0x0000FFFF);
549 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
550 fn16 = insn & 0x0000FFFF;
551 fn11 = (insn >> 5) & 0x000007FF;
552 fpfn = fn11 & 0x3F;
553 fn7 = (insn >> 5) & 0x0000007F;
554 fn2 = (insn >> 5) & 0x00000003;
555 ret = 0;
556 #if defined ALPHA_DEBUG_DISAS
557 if (logfile != NULL) {
558 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
559 opc, ra, rb, rc, disp16);
561 #endif
562 switch (opc) {
563 case 0x00:
564 /* CALL_PAL */
565 if (palcode >= 0x80 && palcode < 0xC0) {
566 /* Unprivileged PAL call */
567 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
568 #if !defined (CONFIG_USER_ONLY)
569 } else if (palcode < 0x40) {
570 /* Privileged PAL code */
571 if (ctx->mem_idx & 1)
572 goto invalid_opc;
573 else
574 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
575 #endif
576 } else {
577 /* Invalid PAL call */
578 goto invalid_opc;
580 ret = 3;
581 break;
582 case 0x01:
583 /* OPC01 */
584 goto invalid_opc;
585 case 0x02:
586 /* OPC02 */
587 goto invalid_opc;
588 case 0x03:
589 /* OPC03 */
590 goto invalid_opc;
591 case 0x04:
592 /* OPC04 */
593 goto invalid_opc;
594 case 0x05:
595 /* OPC05 */
596 goto invalid_opc;
597 case 0x06:
598 /* OPC06 */
599 goto invalid_opc;
600 case 0x07:
601 /* OPC07 */
602 goto invalid_opc;
603 case 0x08:
604 /* LDA */
605 if (likely(ra != 31)) {
606 if (rb != 31)
607 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
608 else
609 tcg_gen_movi_i64(cpu_ir[ra], disp16);
611 break;
612 case 0x09:
613 /* LDAH */
614 if (likely(ra != 31)) {
615 if (rb != 31)
616 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
617 else
618 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
620 break;
621 case 0x0A:
622 /* LDBU */
623 if (!(ctx->amask & AMASK_BWX))
624 goto invalid_opc;
625 gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
626 break;
627 case 0x0B:
628 /* LDQ_U */
629 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
630 break;
631 case 0x0C:
632 /* LDWU */
633 if (!(ctx->amask & AMASK_BWX))
634 goto invalid_opc;
635 gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 1);
636 break;
637 case 0x0D:
638 /* STW */
639 gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
640 break;
641 case 0x0E:
642 /* STB */
643 gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
644 break;
645 case 0x0F:
646 /* STQ_U */
647 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
648 break;
649 case 0x10:
650 switch (fn7) {
651 case 0x00:
652 /* ADDL */
653 if (likely(rc != 31)) {
654 if (ra != 31) {
655 if (islit) {
656 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
657 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
658 } else {
659 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
660 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
662 } else {
663 if (islit)
664 tcg_gen_movi_i64(cpu_ir[rc], lit);
665 else
666 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
669 break;
670 case 0x02:
671 /* S4ADDL */
672 if (likely(rc != 31)) {
673 if (ra != 31) {
674 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
675 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
676 if (islit)
677 tcg_gen_addi_i64(tmp, tmp, lit);
678 else
679 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
680 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
681 tcg_temp_free(tmp);
682 } else {
683 if (islit)
684 tcg_gen_movi_i64(cpu_ir[rc], lit);
685 else
686 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
689 break;
690 case 0x09:
691 /* SUBL */
692 if (likely(rc != 31)) {
693 if (ra != 31) {
694 if (islit)
695 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
696 else
697 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
698 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
699 } else {
700 if (islit)
701 tcg_gen_movi_i64(cpu_ir[rc], -lit);
702 else {
703 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
704 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
707 break;
708 case 0x0B:
709 /* S4SUBL */
710 if (likely(rc != 31)) {
711 if (ra != 31) {
712 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
713 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
714 if (islit)
715 tcg_gen_subi_i64(tmp, tmp, lit);
716 else
717 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
718 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
719 tcg_temp_free(tmp);
720 } else {
721 if (islit)
722 tcg_gen_movi_i64(cpu_ir[rc], -lit);
723 else {
724 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
725 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
729 break;
730 case 0x0F:
731 /* CMPBGE */
732 gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
733 break;
734 case 0x12:
735 /* S8ADDL */
736 if (likely(rc != 31)) {
737 if (ra != 31) {
738 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
739 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
740 if (islit)
741 tcg_gen_addi_i64(tmp, tmp, lit);
742 else
743 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
744 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
745 tcg_temp_free(tmp);
746 } else {
747 if (islit)
748 tcg_gen_movi_i64(cpu_ir[rc], lit);
749 else
750 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
753 break;
754 case 0x1B:
755 /* S8SUBL */
756 if (likely(rc != 31)) {
757 if (ra != 31) {
758 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
759 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
760 if (islit)
761 tcg_gen_subi_i64(tmp, tmp, lit);
762 else
763 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
764 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
765 tcg_temp_free(tmp);
766 } else {
767 if (islit)
768 tcg_gen_movi_i64(cpu_ir[rc], -lit);
769 else
770 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
771 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
775 break;
776 case 0x1D:
777 /* CMPULT */
778 gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
779 break;
780 case 0x20:
781 /* ADDQ */
782 if (likely(rc != 31)) {
783 if (ra != 31) {
784 if (islit)
785 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
786 else
787 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
788 } else {
789 if (islit)
790 tcg_gen_movi_i64(cpu_ir[rc], lit);
791 else
792 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
795 break;
796 case 0x22:
797 /* S4ADDQ */
798 if (likely(rc != 31)) {
799 if (ra != 31) {
800 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
801 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
802 if (islit)
803 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
804 else
805 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
806 tcg_temp_free(tmp);
807 } else {
808 if (islit)
809 tcg_gen_movi_i64(cpu_ir[rc], lit);
810 else
811 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
814 break;
815 case 0x29:
816 /* SUBQ */
817 if (likely(rc != 31)) {
818 if (ra != 31) {
819 if (islit)
820 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
821 else
822 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
823 } else {
824 if (islit)
825 tcg_gen_movi_i64(cpu_ir[rc], -lit);
826 else
827 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
830 break;
831 case 0x2B:
832 /* S4SUBQ */
833 if (likely(rc != 31)) {
834 if (ra != 31) {
835 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
836 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
837 if (islit)
838 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
839 else
840 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
841 tcg_temp_free(tmp);
842 } else {
843 if (islit)
844 tcg_gen_movi_i64(cpu_ir[rc], -lit);
845 else
846 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
849 break;
850 case 0x2D:
851 /* CMPEQ */
852 gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
853 break;
854 case 0x32:
855 /* S8ADDQ */
856 if (likely(rc != 31)) {
857 if (ra != 31) {
858 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
859 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
860 if (islit)
861 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
862 else
863 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
864 tcg_temp_free(tmp);
865 } else {
866 if (islit)
867 tcg_gen_movi_i64(cpu_ir[rc], lit);
868 else
869 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
872 break;
873 case 0x3B:
874 /* S8SUBQ */
875 if (likely(rc != 31)) {
876 if (ra != 31) {
877 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
878 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
879 if (islit)
880 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
881 else
882 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
883 tcg_temp_free(tmp);
884 } else {
885 if (islit)
886 tcg_gen_movi_i64(cpu_ir[rc], -lit);
887 else
888 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
891 break;
892 case 0x3D:
893 /* CMPULE */
894 gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
895 break;
896 case 0x40:
897 /* ADDL/V */
898 gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
899 break;
900 case 0x49:
901 /* SUBL/V */
902 gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
903 break;
904 case 0x4D:
905 /* CMPLT */
906 gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
907 break;
908 case 0x60:
909 /* ADDQ/V */
910 gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
911 break;
912 case 0x69:
913 /* SUBQ/V */
914 gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
915 break;
916 case 0x6D:
917 /* CMPLE */
918 gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
919 break;
920 default:
921 goto invalid_opc;
923 break;
924 case 0x11:
925 switch (fn7) {
926 case 0x00:
927 /* AND */
928 if (likely(rc != 31)) {
929 if (ra == 31)
930 tcg_gen_movi_i64(cpu_ir[rc], 0);
931 else if (islit)
932 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
933 else
934 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
936 break;
937 case 0x08:
938 /* BIC */
939 if (likely(rc != 31)) {
940 if (ra != 31) {
941 if (islit)
942 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
943 else
944 tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
945 } else
946 tcg_gen_movi_i64(cpu_ir[rc], 0);
948 break;
949 case 0x14:
950 /* CMOVLBS */
951 gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
952 break;
953 case 0x16:
954 /* CMOVLBC */
955 gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
956 break;
957 case 0x20:
958 /* BIS */
959 if (likely(rc != 31)) {
960 if (ra != 31) {
961 if (islit)
962 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
963 else
964 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
965 } else {
966 if (islit)
967 tcg_gen_movi_i64(cpu_ir[rc], lit);
968 else
969 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
972 break;
973 case 0x24:
974 /* CMOVEQ */
975 gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
976 break;
977 case 0x26:
978 /* CMOVNE */
979 gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
980 break;
981 case 0x28:
982 /* ORNOT */
983 if (likely(rc != 31)) {
984 if (ra != 31) {
985 if (islit)
986 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
987 else
988 tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
989 } else {
990 if (islit)
991 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
992 else
993 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
996 break;
997 case 0x40:
998 /* XOR */
999 if (likely(rc != 31)) {
1000 if (ra != 31) {
1001 if (islit)
1002 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1003 else
1004 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1005 } else {
1006 if (islit)
1007 tcg_gen_movi_i64(cpu_ir[rc], lit);
1008 else
1009 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1012 break;
1013 case 0x44:
1014 /* CMOVLT */
1015 gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1016 break;
1017 case 0x46:
1018 /* CMOVGE */
1019 gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1020 break;
1021 case 0x48:
1022 /* EQV */
1023 if (likely(rc != 31)) {
1024 if (ra != 31) {
1025 if (islit)
1026 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1027 else
1028 tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1029 } else {
1030 if (islit)
1031 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1032 else
1033 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1036 break;
1037 case 0x61:
1038 /* AMASK */
1039 if (likely(rc != 31)) {
1040 if (islit)
1041 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1042 else
1043 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1045 break;
1046 case 0x64:
1047 /* CMOVLE */
1048 gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1049 break;
1050 case 0x66:
1051 /* CMOVGT */
1052 gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1053 break;
1054 case 0x6C:
1055 /* IMPLVER */
1056 if (rc != 31)
1057 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
1058 break;
1059 default:
1060 goto invalid_opc;
1062 break;
1063 case 0x12:
1064 switch (fn7) {
1065 case 0x02:
1066 /* MSKBL */
1067 gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
1068 break;
1069 case 0x06:
1070 /* EXTBL */
1071 gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1072 break;
1073 case 0x0B:
1074 /* INSBL */
1075 gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
1076 break;
1077 case 0x12:
1078 /* MSKWL */
1079 gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
1080 break;
1081 case 0x16:
1082 /* EXTWL */
1083 gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1084 break;
1085 case 0x1B:
1086 /* INSWL */
1087 gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
1088 break;
1089 case 0x22:
1090 /* MSKLL */
1091 gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
1092 break;
1093 case 0x26:
1094 /* EXTLL */
1095 gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1096 break;
1097 case 0x2B:
1098 /* INSLL */
1099 gen_arith3(helper_insll, ra, rb, rc, islit, lit);
1100 break;
1101 case 0x30:
1102 /* ZAP */
1103 gen_arith3(helper_zap, ra, rb, rc, islit, lit);
1104 break;
1105 case 0x31:
1106 /* ZAPNOT */
1107 gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
1108 break;
1109 case 0x32:
1110 /* MSKQL */
1111 gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
1112 break;
1113 case 0x34:
1114 /* SRL */
1115 if (likely(rc != 31)) {
1116 if (ra != 31) {
1117 if (islit)
1118 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1119 else {
1120 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1121 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1122 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1123 tcg_temp_free(shift);
1125 } else
1126 tcg_gen_movi_i64(cpu_ir[rc], 0);
1128 break;
1129 case 0x36:
1130 /* EXTQL */
1131 gen_ext_l(NULL, ra, rb, rc, islit, lit);
1132 break;
1133 case 0x39:
1134 /* SLL */
1135 if (likely(rc != 31)) {
1136 if (ra != 31) {
1137 if (islit)
1138 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1139 else {
1140 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1141 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1142 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1143 tcg_temp_free(shift);
1145 } else
1146 tcg_gen_movi_i64(cpu_ir[rc], 0);
1148 break;
1149 case 0x3B:
1150 /* INSQL */
1151 gen_arith3(helper_insql, ra, rb, rc, islit, lit);
1152 break;
1153 case 0x3C:
1154 /* SRA */
1155 if (likely(rc != 31)) {
1156 if (ra != 31) {
1157 if (islit)
1158 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1159 else {
1160 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1161 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1162 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1163 tcg_temp_free(shift);
1165 } else
1166 tcg_gen_movi_i64(cpu_ir[rc], 0);
1168 break;
1169 case 0x52:
1170 /* MSKWH */
1171 gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
1172 break;
1173 case 0x57:
1174 /* INSWH */
1175 gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
1176 break;
1177 case 0x5A:
1178 /* EXTWH */
1179 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1180 break;
1181 case 0x62:
1182 /* MSKLH */
1183 gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
1184 break;
1185 case 0x67:
1186 /* INSLH */
1187 gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
1188 break;
1189 case 0x6A:
1190 /* EXTLH */
1191 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1192 break;
1193 case 0x72:
1194 /* MSKQH */
1195 gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
1196 break;
1197 case 0x77:
1198 /* INSQH */
1199 gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
1200 break;
1201 case 0x7A:
1202 /* EXTQH */
1203 gen_ext_h(NULL, ra, rb, rc, islit, lit);
1204 break;
1205 default:
1206 goto invalid_opc;
1208 break;
1209 case 0x13:
1210 switch (fn7) {
1211 case 0x00:
1212 /* MULL */
1213 if (likely(rc != 31)) {
1214 if (ra == 31)
1215 tcg_gen_movi_i64(cpu_ir[rc], 0);
1216 else {
1217 if (islit)
1218 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1219 else
1220 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1221 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1224 break;
1225 case 0x20:
1226 /* MULQ */
1227 if (likely(rc != 31)) {
1228 if (ra == 31)
1229 tcg_gen_movi_i64(cpu_ir[rc], 0);
1230 else if (islit)
1231 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1232 else
1233 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1235 break;
1236 case 0x30:
1237 /* UMULH */
1238 gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
1239 break;
1240 case 0x40:
1241 /* MULL/V */
1242 gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
1243 break;
1244 case 0x60:
1245 /* MULQ/V */
1246 gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
1247 break;
1248 default:
1249 goto invalid_opc;
1251 break;
1252 case 0x14:
1253 switch (fpfn) { /* f11 & 0x3F */
1254 case 0x04:
1255 /* ITOFS */
1256 if (!(ctx->amask & AMASK_FIX))
1257 goto invalid_opc;
1258 if (likely(rc != 31)) {
1259 if (ra != 31) {
1260 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1261 tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1262 tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
1263 tcg_temp_free(tmp);
1264 } else
1265 tcg_gen_movi_i64(cpu_fir[rc], 0);
1267 break;
1268 case 0x0A:
1269 /* SQRTF */
1270 if (!(ctx->amask & AMASK_FIX))
1271 goto invalid_opc;
1272 gen_farith2(&helper_sqrtf, rb, rc);
1273 break;
1274 case 0x0B:
1275 /* SQRTS */
1276 if (!(ctx->amask & AMASK_FIX))
1277 goto invalid_opc;
1278 gen_farith2(&helper_sqrts, rb, rc);
1279 break;
1280 case 0x14:
1281 /* ITOFF */
1282 if (!(ctx->amask & AMASK_FIX))
1283 goto invalid_opc;
1284 if (likely(rc != 31)) {
1285 if (ra != 31) {
1286 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1287 tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1288 tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
1289 tcg_temp_free(tmp);
1290 } else
1291 tcg_gen_movi_i64(cpu_fir[rc], 0);
1293 break;
1294 case 0x24:
1295 /* ITOFT */
1296 if (!(ctx->amask & AMASK_FIX))
1297 goto invalid_opc;
1298 if (likely(rc != 31)) {
1299 if (ra != 31)
1300 tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1301 else
1302 tcg_gen_movi_i64(cpu_fir[rc], 0);
1304 break;
1305 case 0x2A:
1306 /* SQRTG */
1307 if (!(ctx->amask & AMASK_FIX))
1308 goto invalid_opc;
1309 gen_farith2(&helper_sqrtg, rb, rc);
1310 break;
1311 case 0x02B:
1312 /* SQRTT */
1313 if (!(ctx->amask & AMASK_FIX))
1314 goto invalid_opc;
1315 gen_farith2(&helper_sqrtt, rb, rc);
1316 break;
1317 default:
1318 goto invalid_opc;
1320 break;
1321 case 0x15:
1322 /* VAX floating point */
1323 /* XXX: rounding mode and trap are ignored (!) */
1324 switch (fpfn) { /* f11 & 0x3F */
1325 case 0x00:
1326 /* ADDF */
1327 gen_farith3(&helper_addf, ra, rb, rc);
1328 break;
1329 case 0x01:
1330 /* SUBF */
1331 gen_farith3(&helper_subf, ra, rb, rc);
1332 break;
1333 case 0x02:
1334 /* MULF */
1335 gen_farith3(&helper_mulf, ra, rb, rc);
1336 break;
1337 case 0x03:
1338 /* DIVF */
1339 gen_farith3(&helper_divf, ra, rb, rc);
1340 break;
1341 case 0x1E:
1342 /* CVTDG */
1343 #if 0 // TODO
1344 gen_farith2(&helper_cvtdg, rb, rc);
1345 #else
1346 goto invalid_opc;
1347 #endif
1348 break;
1349 case 0x20:
1350 /* ADDG */
1351 gen_farith3(&helper_addg, ra, rb, rc);
1352 break;
1353 case 0x21:
1354 /* SUBG */
1355 gen_farith3(&helper_subg, ra, rb, rc);
1356 break;
1357 case 0x22:
1358 /* MULG */
1359 gen_farith3(&helper_mulg, ra, rb, rc);
1360 break;
1361 case 0x23:
1362 /* DIVG */
1363 gen_farith3(&helper_divg, ra, rb, rc);
1364 break;
1365 case 0x25:
1366 /* CMPGEQ */
1367 gen_farith3(&helper_cmpgeq, ra, rb, rc);
1368 break;
1369 case 0x26:
1370 /* CMPGLT */
1371 gen_farith3(&helper_cmpglt, ra, rb, rc);
1372 break;
1373 case 0x27:
1374 /* CMPGLE */
1375 gen_farith3(&helper_cmpgle, ra, rb, rc);
1376 break;
1377 case 0x2C:
1378 /* CVTGF */
1379 gen_farith2(&helper_cvtgf, rb, rc);
1380 break;
1381 case 0x2D:
1382 /* CVTGD */
1383 #if 0 // TODO
1384 gen_farith2(ctx, &helper_cvtgd, rb, rc);
1385 #else
1386 goto invalid_opc;
1387 #endif
1388 break;
1389 case 0x2F:
1390 /* CVTGQ */
1391 gen_farith2(&helper_cvtgq, rb, rc);
1392 break;
1393 case 0x3C:
1394 /* CVTQF */
1395 gen_farith2(&helper_cvtqf, rb, rc);
1396 break;
1397 case 0x3E:
1398 /* CVTQG */
1399 gen_farith2(&helper_cvtqg, rb, rc);
1400 break;
1401 default:
1402 goto invalid_opc;
1404 break;
1405 case 0x16:
1406 /* IEEE floating-point */
1407 /* XXX: rounding mode and traps are ignored (!) */
1408 switch (fpfn) { /* f11 & 0x3F */
1409 case 0x00:
1410 /* ADDS */
1411 gen_farith3(&helper_adds, ra, rb, rc);
1412 break;
1413 case 0x01:
1414 /* SUBS */
1415 gen_farith3(&helper_subs, ra, rb, rc);
1416 break;
1417 case 0x02:
1418 /* MULS */
1419 gen_farith3(&helper_muls, ra, rb, rc);
1420 break;
1421 case 0x03:
1422 /* DIVS */
1423 gen_farith3(&helper_divs, ra, rb, rc);
1424 break;
1425 case 0x20:
1426 /* ADDT */
1427 gen_farith3(&helper_addt, ra, rb, rc);
1428 break;
1429 case 0x21:
1430 /* SUBT */
1431 gen_farith3(&helper_subt, ra, rb, rc);
1432 break;
1433 case 0x22:
1434 /* MULT */
1435 gen_farith3(&helper_mult, ra, rb, rc);
1436 break;
1437 case 0x23:
1438 /* DIVT */
1439 gen_farith3(&helper_divt, ra, rb, rc);
1440 break;
1441 case 0x24:
1442 /* CMPTUN */
1443 gen_farith3(&helper_cmptun, ra, rb, rc);
1444 break;
1445 case 0x25:
1446 /* CMPTEQ */
1447 gen_farith3(&helper_cmpteq, ra, rb, rc);
1448 break;
1449 case 0x26:
1450 /* CMPTLT */
1451 gen_farith3(&helper_cmptlt, ra, rb, rc);
1452 break;
1453 case 0x27:
1454 /* CMPTLE */
1455 gen_farith3(&helper_cmptle, ra, rb, rc);
1456 break;
1457 case 0x2C:
1458 /* XXX: incorrect */
1459 if (fn11 == 0x2AC) {
1460 /* CVTST */
1461 gen_farith2(&helper_cvtst, rb, rc);
1462 } else {
1463 /* CVTTS */
1464 gen_farith2(&helper_cvtts, rb, rc);
1466 break;
1467 case 0x2F:
1468 /* CVTTQ */
1469 gen_farith2(&helper_cvttq, rb, rc);
1470 break;
1471 case 0x3C:
1472 /* CVTQS */
1473 gen_farith2(&helper_cvtqs, rb, rc);
1474 break;
1475 case 0x3E:
1476 /* CVTQT */
1477 gen_farith2(&helper_cvtqt, rb, rc);
1478 break;
1479 default:
1480 goto invalid_opc;
1482 break;
1483 case 0x17:
1484 switch (fn11) {
1485 case 0x010:
1486 /* CVTLQ */
1487 gen_farith2(&helper_cvtlq, rb, rc);
1488 break;
1489 case 0x020:
1490 if (likely(rc != 31)) {
1491 if (ra == rb)
1492 /* FMOV */
1493 tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1494 else
1495 /* CPYS */
1496 gen_farith3(&helper_cpys, ra, rb, rc);
1498 break;
1499 case 0x021:
1500 /* CPYSN */
1501 gen_farith3(&helper_cpysn, ra, rb, rc);
1502 break;
1503 case 0x022:
1504 /* CPYSE */
1505 gen_farith3(&helper_cpyse, ra, rb, rc);
1506 break;
1507 case 0x024:
1508 /* MT_FPCR */
1509 if (likely(ra != 31))
1510 tcg_gen_helper_0_1(helper_store_fpcr, cpu_fir[ra]);
1511 else {
1512 TCGv tmp = tcg_const_i64(0);
1513 tcg_gen_helper_0_1(helper_store_fpcr, tmp);
1514 tcg_temp_free(tmp);
1516 break;
1517 case 0x025:
1518 /* MF_FPCR */
1519 if (likely(ra != 31))
1520 tcg_gen_helper_1_0(helper_load_fpcr, cpu_fir[ra]);
1521 break;
1522 case 0x02A:
1523 /* FCMOVEQ */
1524 gen_fcmov(&helper_cmpfeq, ra, rb, rc);
1525 break;
1526 case 0x02B:
1527 /* FCMOVNE */
1528 gen_fcmov(&helper_cmpfne, ra, rb, rc);
1529 break;
1530 case 0x02C:
1531 /* FCMOVLT */
1532 gen_fcmov(&helper_cmpflt, ra, rb, rc);
1533 break;
1534 case 0x02D:
1535 /* FCMOVGE */
1536 gen_fcmov(&helper_cmpfge, ra, rb, rc);
1537 break;
1538 case 0x02E:
1539 /* FCMOVLE */
1540 gen_fcmov(&helper_cmpfle, ra, rb, rc);
1541 break;
1542 case 0x02F:
1543 /* FCMOVGT */
1544 gen_fcmov(&helper_cmpfgt, ra, rb, rc);
1545 break;
1546 case 0x030:
1547 /* CVTQL */
1548 gen_farith2(&helper_cvtql, rb, rc);
1549 break;
1550 case 0x130:
1551 /* CVTQL/V */
1552 gen_farith2(&helper_cvtqlv, rb, rc);
1553 break;
1554 case 0x530:
1555 /* CVTQL/SV */
1556 gen_farith2(&helper_cvtqlsv, rb, rc);
1557 break;
1558 default:
1559 goto invalid_opc;
1561 break;
1562 case 0x18:
1563 switch ((uint16_t)disp16) {
1564 case 0x0000:
1565 /* TRAPB */
1566 /* No-op. Just exit from the current tb */
1567 ret = 2;
1568 break;
1569 case 0x0400:
1570 /* EXCB */
1571 /* No-op. Just exit from the current tb */
1572 ret = 2;
1573 break;
1574 case 0x4000:
1575 /* MB */
1576 /* No-op */
1577 break;
1578 case 0x4400:
1579 /* WMB */
1580 /* No-op */
1581 break;
1582 case 0x8000:
1583 /* FETCH */
1584 /* No-op */
1585 break;
1586 case 0xA000:
1587 /* FETCH_M */
1588 /* No-op */
1589 break;
1590 case 0xC000:
1591 /* RPCC */
1592 if (ra != 31)
1593 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
1594 break;
1595 case 0xE000:
1596 /* RC */
1597 if (ra != 31)
1598 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1599 break;
1600 case 0xE800:
1601 /* ECB */
1602 /* XXX: TODO: evict tb cache at address rb */
1603 #if 0
1604 ret = 2;
1605 #else
1606 goto invalid_opc;
1607 #endif
1608 break;
1609 case 0xF000:
1610 /* RS */
1611 if (ra != 31)
1612 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1613 break;
1614 case 0xF800:
1615 /* WH64 */
1616 /* No-op */
1617 break;
1618 default:
1619 goto invalid_opc;
1621 break;
1622 case 0x19:
1623 /* HW_MFPR (PALcode) */
1624 #if defined (CONFIG_USER_ONLY)
1625 goto invalid_opc;
1626 #else
1627 if (!ctx->pal_mode)
1628 goto invalid_opc;
1629 if (ra != 31) {
1630 TCGv tmp = tcg_const_i32(insn & 0xFF);
1631 tcg_gen_helper_1_2(helper_mfpr, cpu_ir[ra], tmp, cpu_ir[ra]);
1632 tcg_temp_free(tmp);
1634 break;
1635 #endif
1636 case 0x1A:
1637 if (rb != 31)
1638 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1639 else
1640 tcg_gen_movi_i64(cpu_pc, 0);
1641 if (ra != 31)
1642 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1643 /* Those four jumps only differ by the branch prediction hint */
1644 switch (fn2) {
1645 case 0x0:
1646 /* JMP */
1647 break;
1648 case 0x1:
1649 /* JSR */
1650 break;
1651 case 0x2:
1652 /* RET */
1653 break;
1654 case 0x3:
1655 /* JSR_COROUTINE */
1656 break;
1658 ret = 1;
1659 break;
1660 case 0x1B:
1661 /* HW_LD (PALcode) */
1662 #if defined (CONFIG_USER_ONLY)
1663 goto invalid_opc;
1664 #else
1665 if (!ctx->pal_mode)
1666 goto invalid_opc;
1667 if (ra != 31) {
1668 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
1669 if (rb != 31)
1670 tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
1671 else
1672 tcg_gen_movi_i64(addr, disp12);
1673 switch ((insn >> 12) & 0xF) {
1674 case 0x0:
1675 /* Longword physical access */
1676 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1677 break;
1678 case 0x1:
1679 /* Quadword physical access */
1680 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1681 break;
1682 case 0x2:
1683 /* Longword physical access with lock */
1684 tcg_gen_helper_0_2(helper_ldl_l_raw, cpu_ir[ra], addr);
1685 break;
1686 case 0x3:
1687 /* Quadword physical access with lock */
1688 tcg_gen_helper_0_2(helper_ldq_l_raw, cpu_ir[ra], addr);
1689 break;
1690 case 0x4:
1691 /* Longword virtual PTE fetch */
1692 tcg_gen_helper_0_2(helper_ldl_kernel, cpu_ir[ra], addr);
1693 break;
1694 case 0x5:
1695 /* Quadword virtual PTE fetch */
1696 tcg_gen_helper_0_2(helper_ldq_kernel, cpu_ir[ra], addr);
1697 break;
1698 case 0x6:
1699 /* Incpu_ir[ra]id */
1700 goto incpu_ir[ra]id_opc;
1701 case 0x7:
1702 /* Incpu_ir[ra]id */
1703 goto incpu_ir[ra]id_opc;
1704 case 0x8:
1705 /* Longword virtual access */
1706 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1707 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1708 break;
1709 case 0x9:
1710 /* Quadword virtual access */
1711 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1712 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1713 break;
1714 case 0xA:
1715 /* Longword virtual access with protection check */
1716 tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->flags);
1717 break;
1718 case 0xB:
1719 /* Quadword virtual access with protection check */
1720 tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->flags);
1721 break;
1722 case 0xC:
1723 /* Longword virtual access with altenate access mode */
1724 tcg_gen_helper_0_0(helper_set_alt_mode);
1725 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1726 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1727 tcg_gen_helper_0_0(helper_restore_mode);
1728 break;
1729 case 0xD:
1730 /* Quadword virtual access with altenate access mode */
1731 tcg_gen_helper_0_0(helper_set_alt_mode);
1732 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1733 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1734 tcg_gen_helper_0_0(helper_restore_mode);
1735 break;
1736 case 0xE:
1737 /* Longword virtual access with alternate access mode and
1738 * protection checks
1740 tcg_gen_helper_0_0(helper_set_alt_mode);
1741 tcg_gen_helper_0_2(helper_ldl_data, cpu_ir[ra], addr);
1742 tcg_gen_helper_0_0(helper_restore_mode);
1743 break;
1744 case 0xF:
1745 /* Quadword virtual access with alternate access mode and
1746 * protection checks
1748 tcg_gen_helper_0_0(helper_set_alt_mode);
1749 tcg_gen_helper_0_2(helper_ldq_data, cpu_ir[ra], addr);
1750 tcg_gen_helper_0_0(helper_restore_mode);
1751 break;
1753 tcg_temp_free(addr);
1755 break;
1756 #endif
1757 case 0x1C:
1758 switch (fn7) {
1759 case 0x00:
1760 /* SEXTB */
1761 if (!(ctx->amask & AMASK_BWX))
1762 goto invalid_opc;
1763 if (likely(rc != 31)) {
1764 if (islit)
1765 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1766 else
1767 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1769 break;
1770 case 0x01:
1771 /* SEXTW */
1772 if (!(ctx->amask & AMASK_BWX))
1773 goto invalid_opc;
1774 if (likely(rc != 31)) {
1775 if (islit)
1776 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1777 else
1778 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1780 break;
1781 case 0x30:
1782 /* CTPOP */
1783 if (!(ctx->amask & AMASK_CIX))
1784 goto invalid_opc;
1785 if (likely(rc != 31)) {
1786 if (islit)
1787 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1788 else
1789 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1791 break;
1792 case 0x31:
1793 /* PERR */
1794 if (!(ctx->amask & AMASK_MVI))
1795 goto invalid_opc;
1796 /* XXX: TODO */
1797 goto invalid_opc;
1798 break;
1799 case 0x32:
1800 /* CTLZ */
1801 if (!(ctx->amask & AMASK_CIX))
1802 goto invalid_opc;
1803 if (likely(rc != 31)) {
1804 if (islit)
1805 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1806 else
1807 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1809 break;
1810 case 0x33:
1811 /* CTTZ */
1812 if (!(ctx->amask & AMASK_CIX))
1813 goto invalid_opc;
1814 if (likely(rc != 31)) {
1815 if (islit)
1816 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1817 else
1818 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1820 break;
1821 case 0x34:
1822 /* UNPKBW */
1823 if (!(ctx->amask & AMASK_MVI))
1824 goto invalid_opc;
1825 /* XXX: TODO */
1826 goto invalid_opc;
1827 break;
1828 case 0x35:
1829 /* UNPKWL */
1830 if (!(ctx->amask & AMASK_MVI))
1831 goto invalid_opc;
1832 /* XXX: TODO */
1833 goto invalid_opc;
1834 break;
1835 case 0x36:
1836 /* PKWB */
1837 if (!(ctx->amask & AMASK_MVI))
1838 goto invalid_opc;
1839 /* XXX: TODO */
1840 goto invalid_opc;
1841 break;
1842 case 0x37:
1843 /* PKLB */
1844 if (!(ctx->amask & AMASK_MVI))
1845 goto invalid_opc;
1846 /* XXX: TODO */
1847 goto invalid_opc;
1848 break;
1849 case 0x38:
1850 /* MINSB8 */
1851 if (!(ctx->amask & AMASK_MVI))
1852 goto invalid_opc;
1853 /* XXX: TODO */
1854 goto invalid_opc;
1855 break;
1856 case 0x39:
1857 /* MINSW4 */
1858 if (!(ctx->amask & AMASK_MVI))
1859 goto invalid_opc;
1860 /* XXX: TODO */
1861 goto invalid_opc;
1862 break;
1863 case 0x3A:
1864 /* MINUB8 */
1865 if (!(ctx->amask & AMASK_MVI))
1866 goto invalid_opc;
1867 /* XXX: TODO */
1868 goto invalid_opc;
1869 break;
1870 case 0x3B:
1871 /* MINUW4 */
1872 if (!(ctx->amask & AMASK_MVI))
1873 goto invalid_opc;
1874 /* XXX: TODO */
1875 goto invalid_opc;
1876 break;
1877 case 0x3C:
1878 /* MAXUB8 */
1879 if (!(ctx->amask & AMASK_MVI))
1880 goto invalid_opc;
1881 /* XXX: TODO */
1882 goto invalid_opc;
1883 break;
1884 case 0x3D:
1885 /* MAXUW4 */
1886 if (!(ctx->amask & AMASK_MVI))
1887 goto invalid_opc;
1888 /* XXX: TODO */
1889 goto invalid_opc;
1890 break;
1891 case 0x3E:
1892 /* MAXSB8 */
1893 if (!(ctx->amask & AMASK_MVI))
1894 goto invalid_opc;
1895 /* XXX: TODO */
1896 goto invalid_opc;
1897 break;
1898 case 0x3F:
1899 /* MAXSW4 */
1900 if (!(ctx->amask & AMASK_MVI))
1901 goto invalid_opc;
1902 /* XXX: TODO */
1903 goto invalid_opc;
1904 break;
1905 case 0x70:
1906 /* FTOIT */
1907 if (!(ctx->amask & AMASK_FIX))
1908 goto invalid_opc;
1909 if (likely(rc != 31)) {
1910 if (ra != 31)
1911 tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
1912 else
1913 tcg_gen_movi_i64(cpu_ir[rc], 0);
1915 break;
1916 case 0x78:
1917 /* FTOIS */
1918 if (!(ctx->amask & AMASK_FIX))
1919 goto invalid_opc;
1920 if (rc != 31) {
1921 TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
1922 if (ra != 31)
1923 tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
1924 else {
1925 TCGv tmp2 = tcg_const_i64(0);
1926 tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
1927 tcg_temp_free(tmp2);
1929 tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
1930 tcg_temp_free(tmp1);
1932 break;
1933 default:
1934 goto invalid_opc;
1936 break;
1937 case 0x1D:
1938 /* HW_MTPR (PALcode) */
1939 #if defined (CONFIG_USER_ONLY)
1940 goto invalid_opc;
1941 #else
1942 if (!ctx->pal_mode)
1943 goto invalid_opc;
1944 else {
1945 TCGv tmp1 = tcg_const_i32(insn & 0xFF);
1946 if (ra != 31)
1947 tcg_gen_helper(helper_mtpr, tmp1, cpu_ir[ra]);
1948 else {
1949 TCGv tmp2 = tcg_const_i64(0);
1950 tcg_gen_helper(helper_mtpr, tmp1, tmp2);
1951 tcg_temp_free(tmp2);
1953 tcg_temp_free(tmp1);
1954 ret = 2;
1956 break;
1957 #endif
1958 case 0x1E:
1959 /* HW_REI (PALcode) */
1960 #if defined (CONFIG_USER_ONLY)
1961 goto invalid_opc;
1962 #else
1963 if (!ctx->pal_mode)
1964 goto invalid_opc;
1965 if (rb == 31) {
1966 /* "Old" alpha */
1967 tcg_gen_helper_0_0(helper_hw_rei);
1968 } else {
1969 TCGv tmp;
1971 if (ra != 31) {
1972 tmp = tcg_temp_new(TCG_TYPE_I64);
1973 tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
1974 } else
1975 tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
1976 tcg_gen_helper_0_1(helper_hw_ret, tmp);
1977 tcg_temp_free(tmp);
1979 ret = 2;
1980 break;
1981 #endif
1982 case 0x1F:
1983 /* HW_ST (PALcode) */
1984 #if defined (CONFIG_USER_ONLY)
1985 goto invalid_opc;
1986 #else
1987 if (!ctx->pal_mode)
1988 goto invalid_opc;
1989 else {
1990 TCGv addr, val;
1991 addr = tcg_temp_new(TCG_TYPE_I64);
1992 if (rb != 31)
1993 tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
1994 else
1995 tcg_gen_movi_i64(addr, disp12);
1996 if (ra != 31)
1997 val = cpu_ir[ra];
1998 else {
1999 val = tcg_temp_new(TCG_TYPE_I64);
2000 tcg_gen_movi_i64(val, 0);
2002 switch ((insn >> 12) & 0xF) {
2003 case 0x0:
2004 /* Longword physical access */
2005 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2006 break;
2007 case 0x1:
2008 /* Quadword physical access */
2009 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2010 break;
2011 case 0x2:
2012 /* Longword physical access with lock */
2013 tcg_gen_helper_1_2(helper_stl_c_raw, val, val, addr);
2014 break;
2015 case 0x3:
2016 /* Quadword physical access with lock */
2017 tcg_gen_helper_1_2(helper_stq_c_raw, val, val, addr);
2018 break;
2019 case 0x4:
2020 /* Longword virtual access */
2021 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2022 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2023 break;
2024 case 0x5:
2025 /* Quadword virtual access */
2026 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2027 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2028 break;
2029 case 0x6:
2030 /* Invalid */
2031 goto invalid_opc;
2032 case 0x7:
2033 /* Invalid */
2034 goto invalid_opc;
2035 case 0x8:
2036 /* Invalid */
2037 goto invalid_opc;
2038 case 0x9:
2039 /* Invalid */
2040 goto invalid_opc;
2041 case 0xA:
2042 /* Invalid */
2043 goto invalid_opc;
2044 case 0xB:
2045 /* Invalid */
2046 goto invalid_opc;
2047 case 0xC:
2048 /* Longword virtual access with alternate access mode */
2049 tcg_gen_helper_0_0(helper_set_alt_mode);
2050 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2051 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2052 tcg_gen_helper_0_0(helper_restore_mode);
2053 break;
2054 case 0xD:
2055 /* Quadword virtual access with alternate access mode */
2056 tcg_gen_helper_0_0(helper_set_alt_mode);
2057 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2058 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2059 tcg_gen_helper_0_0(helper_restore_mode);
2060 break;
2061 case 0xE:
2062 /* Invalid */
2063 goto invalid_opc;
2064 case 0xF:
2065 /* Invalid */
2066 goto invalid_opc;
2068 if (ra != 31)
2069 tcg_temp_free(val);
2070 tcg_temp_free(addr);
2072 ret = 2;
2073 break;
2074 #endif
2075 case 0x20:
2076 /* LDF */
2077 gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2078 break;
2079 case 0x21:
2080 /* LDG */
2081 gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2082 break;
2083 case 0x22:
2084 /* LDS */
2085 gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2086 break;
2087 case 0x23:
2088 /* LDT */
2089 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2090 break;
2091 case 0x24:
2092 /* STF */
2093 gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2094 break;
2095 case 0x25:
2096 /* STG */
2097 gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2098 break;
2099 case 0x26:
2100 /* STS */
2101 gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2102 break;
2103 case 0x27:
2104 /* STT */
2105 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
2106 break;
2107 case 0x28:
2108 /* LDL */
2109 gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2110 break;
2111 case 0x29:
2112 /* LDQ */
2113 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2114 break;
2115 case 0x2A:
2116 /* LDL_L */
2117 gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2118 break;
2119 case 0x2B:
2120 /* LDQ_L */
2121 gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2122 break;
2123 case 0x2C:
2124 /* STL */
2125 gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2126 break;
2127 case 0x2D:
2128 /* STQ */
2129 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2130 break;
2131 case 0x2E:
2132 /* STL_C */
2133 gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0);
2134 break;
2135 case 0x2F:
2136 /* STQ_C */
2137 gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0);
2138 break;
2139 case 0x30:
2140 /* BR */
2141 if (ra != 31)
2142 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2143 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2144 ret = 1;
2145 break;
2146 case 0x31:
2147 /* FBEQ */
2148 gen_fbcond(ctx, &helper_cmpfeq, ra, disp16);
2149 ret = 1;
2150 break;
2151 case 0x32:
2152 /* FBLT */
2153 gen_fbcond(ctx, &helper_cmpflt, ra, disp16);
2154 ret = 1;
2155 break;
2156 case 0x33:
2157 /* FBLE */
2158 gen_fbcond(ctx, &helper_cmpfle, ra, disp16);
2159 ret = 1;
2160 break;
2161 case 0x34:
2162 /* BSR */
2163 if (ra != 31)
2164 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2165 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2166 ret = 1;
2167 break;
2168 case 0x35:
2169 /* FBNE */
2170 gen_fbcond(ctx, &helper_cmpfne, ra, disp16);
2171 ret = 1;
2172 break;
2173 case 0x36:
2174 /* FBGE */
2175 gen_fbcond(ctx, &helper_cmpfge, ra, disp16);
2176 ret = 1;
2177 break;
2178 case 0x37:
2179 /* FBGT */
2180 gen_fbcond(ctx, &helper_cmpfgt, ra, disp16);
2181 ret = 1;
2182 break;
2183 case 0x38:
2184 /* BLBC */
2185 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
2186 ret = 1;
2187 break;
2188 case 0x39:
2189 /* BEQ */
2190 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
2191 ret = 1;
2192 break;
2193 case 0x3A:
2194 /* BLT */
2195 gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
2196 ret = 1;
2197 break;
2198 case 0x3B:
2199 /* BLE */
2200 gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
2201 ret = 1;
2202 break;
2203 case 0x3C:
2204 /* BLBS */
2205 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
2206 ret = 1;
2207 break;
2208 case 0x3D:
2209 /* BNE */
2210 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
2211 ret = 1;
2212 break;
2213 case 0x3E:
2214 /* BGE */
2215 gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
2216 ret = 1;
2217 break;
2218 case 0x3F:
2219 /* BGT */
2220 gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
2221 ret = 1;
2222 break;
2223 invalid_opc:
2224 gen_invalid(ctx);
2225 ret = 3;
2226 break;
2229 return ret;
/* Translate a block of guest Alpha code starting at tb->pc into TCG ops.
 * When search_pc is non-zero, per-op metadata (gen_opc_pc,
 * gen_opc_instr_start, gen_opc_icount) is also recorded so that a host
 * pc inside the generated code can later be mapped back to a guest pc. */
2232 static always_inline void gen_intermediate_code_internal (CPUState *env,
2233 TranslationBlock *tb,
2234 int search_pc)
2236 #if defined ALPHA_DEBUG_DISAS
/* Running count of translated instructions, used only for the debug log. */
2237 static int insn_count;
2238 #endif
2239 DisasContext ctx, *ctxp = &ctx;
2240 target_ulong pc_start;
2241 uint32_t insn;
2242 uint16_t *gen_opc_end;
/* lj tracks the last gen_opc_* slot written; -1 means none written yet. */
2243 int j, lj = -1;
2244 int ret;
2245 int num_insns;
2246 int max_insns;
2248 pc_start = tb->pc;
2249 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2250 ctx.pc = pc_start;
2251 ctx.amask = env->amask;
2252 #if defined (CONFIG_USER_ONLY)
2253 ctx.mem_idx = 0;
2254 #else
/* Memory index is taken from the mode bits of the PS register. */
2255 ctx.mem_idx = ((env->ps >> 3) & 3);
/* Low bit of EXC_ADDR flags PALcode mode. */
2256 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
2257 #endif
2258 num_insns = 0;
/* Cap instructions per TB; a zero count field in cflags means "no limit". */
2259 max_insns = tb->cflags & CF_COUNT_MASK;
2260 if (max_insns == 0)
2261 max_insns = CF_COUNT_MASK;
2263 gen_icount_start();
/* Main translation loop: translate_one returns non-zero to end the TB. */
2264 for (ret = 0; ret == 0;) {
/* Emit a debug exception when a breakpoint matches the current pc. */
2265 if (env->nb_breakpoints > 0) {
2266 for(j = 0; j < env->nb_breakpoints; j++) {
2267 if (env->breakpoints[j] == ctx.pc) {
2268 gen_excp(&ctx, EXCP_DEBUG, 0);
2269 break;
/* Record pc/icount metadata for the ops about to be generated. */
2273 if (search_pc) {
2274 j = gen_opc_ptr - gen_opc_buf;
2275 if (lj < j) {
2276 lj++;
/* Zero-fill any slots for ops emitted since the previous instruction. */
2277 while (lj < j)
2278 gen_opc_instr_start[lj++] = 0;
2279 gen_opc_pc[lj] = ctx.pc;
2280 gen_opc_instr_start[lj] = 1;
2281 gen_opc_icount[lj] = num_insns;
/* Start I/O accounting just before the final instruction if requested. */
2284 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2285 gen_io_start();
2286 #if defined ALPHA_DEBUG_DISAS
2287 insn_count++;
2288 if (logfile != NULL) {
2289 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2290 ctx.pc, ctx.mem_idx);
2292 #endif
/* Fetch the 32-bit instruction word at the current guest pc. */
2293 insn = ldl_code(ctx.pc);
2294 #if defined ALPHA_DEBUG_DISAS
/* NOTE(review): insn_count is incremented twice per instruction (here and
 * a few lines above) — looks unintended; it only affects the debug log. */
2295 insn_count++;
2296 if (logfile != NULL) {
2297 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2299 #endif
2300 num_insns++;
2301 ctx.pc += 4;
/* Non-zero return requests end of TB (e.g. 1 = branch already set cpu_pc,
 * 2 = stop and refetch pc, 3 = invalid opcode — per the switch above). */
2302 ret = translate_one(ctxp, insn);
2303 if (ret != 0)
2304 break;
2305 /* if we reach a page boundary or are single stepping, stop
2306 * generation
2308 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2309 (env->singlestep_enabled) ||
2310 num_insns >= max_insns) {
2311 break;
/* DO_SINGLE_STEP (debug option) forces one instruction per TB. */
2313 #if defined (DO_SINGLE_STEP)
2314 break;
2315 #endif
/* Unless a branch/exception already set cpu_pc (ret 1 or 3), store the
 * address of the next instruction. */
2317 if (ret != 1 && ret != 3) {
2318 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2320 #if defined (DO_TB_FLUSH)
2321 tcg_gen_helper_0_0(helper_tb_flush);
2322 #endif
2323 if (tb->cflags & CF_LAST_IO)
2324 gen_io_end();
2325 /* Generate the return instruction */
2326 tcg_gen_exit_tb(0);
2327 gen_icount_end(tb, num_insns);
2328 *gen_opc_ptr = INDEX_op_end;
/* Either zero-fill the trailing metadata slots (search_pc mode) or record
 * the TB's final size and instruction count. */
2329 if (search_pc) {
2330 j = gen_opc_ptr - gen_opc_buf;
2331 lj++;
2332 while (lj <= j)
2333 gen_opc_instr_start[lj++] = 0;
2334 } else {
2335 tb->size = ctx.pc - pc_start;
2336 tb->icount = num_insns;
2338 #if defined ALPHA_DEBUG_DISAS
2339 if (loglevel & CPU_LOG_TB_CPU) {
2340 cpu_dump_state(env, logfile, fprintf, 0);
2342 if (loglevel & CPU_LOG_TB_IN_ASM) {
2343 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2344 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2345 fprintf(logfile, "\n");
2347 #endif
/* Public entry point: translate a TB without recording pc-search metadata. */
2350 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2352 gen_intermediate_code_internal(env, tb, 0);
/* Public entry point: re-translate a TB while recording pc-search metadata,
 * so a faulting host pc can be mapped back to a guest pc. */
2355 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2357 gen_intermediate_code_internal(env, tb, 1);
/* Allocate and initialize a new Alpha CPU state.  Returns NULL on
 * allocation failure.  NOTE(review): cpu_model is not consulted in the
 * visible code — the implementation version is hardcoded below. */
2360 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2362 CPUAlphaState *env;
2363 uint64_t hwpcb;
2365 env = qemu_mallocz(sizeof(CPUAlphaState));
2366 if (!env)
2367 return NULL;
2368 cpu_exec_init(env);
2369 alpha_translate_init();
2370 tlb_flush(env, 1);
2371 /* XXX: should not be hardcoded */
2372 env->implver = IMPLVER_2106x;
2373 env->ps = 0x1F00;
2374 #if defined (CONFIG_USER_ONLY)
/* User-only emulation: set the user-mode bit in PS. */
2375 env->ps |= 1 << 3;
2376 #endif
2377 pal_init(env);
2378 /* Initialize IPR */
/* hwpcb is the hardware PCB base set up by pal_init; the commented-out
 * loads below would read initial stack pointers from it. */
2379 hwpcb = env->ipr[IPR_PCBB];
2380 env->ipr[IPR_ASN] = 0;
2381 env->ipr[IPR_ASTEN] = 0;
2382 env->ipr[IPR_ASTSR] = 0;
2383 env->ipr[IPR_DATFX] = 0;
2384 /* XXX: fix this */
2385 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2386 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2387 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2388 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2389 env->ipr[IPR_FEN] = 0;
/* Boot at the highest interrupt priority level (all interrupts masked). */
2390 env->ipr[IPR_IPL] = 31;
2391 env->ipr[IPR_MCES] = 0;
2392 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2393 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2394 env->ipr[IPR_SISR] = 0;
2395 env->ipr[IPR_VIRBND] = -1ULL;
2397 return env;
/* Restore the guest pc after a fault inside generated code: pc_pos indexes
 * the gen_opc_pc table filled in by gen_intermediate_code_pc. */
2400 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2401 unsigned long searched_pc, int pc_pos, void *puc)
2403 env->pc = gen_opc_pc[pc_pos];