/*
 * Alpha emulation cpu translation for qemu.
 *
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"
/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
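/* Sizing note: "ir0".."ir9" need 4 bytes each ("ir" + digit + NUL) and
   "ir10".."ir30" need 5, hence 10*4 + 21*5 below; the "fir" names are
   one character longer, hence 10*5 + 21*6.  */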
static char cpu_reg_names[10*4 + 21*5 + 10*5 + 21*6];

#include "gen-icount.h"
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
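/* Load-locked is emulated by remembering the locked address in cpu_lock;
   the store-conditional helpers below succeed only while cpu_lock still
   equals the store address, and then invalidate it.  This is a
   simplification of the architected lock_flag behaviour.  */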
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
static always_inline void gen_bcond (DisasContext *ctx,
                                     TCGCond cond,
                                     int ra, int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
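/* Branch displacements count instructions and are relative to the updated
   PC; ctx->pc has already been advanced past the branch, so the taken
   target above is ctx->pc + 4 * disp.  */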
static always_inline void gen_fbcond (DisasContext *ctx, int opc,
                                      int ra, int32_t disp21)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_temp_free(tmp);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* FP branches are branch-format instructions with the same 21-bit
       displacement as the integer branches.  */
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}
static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
#define FARITH2(name)                                       \
static always_inline void glue(gen_f, name)(int rb, int rc) \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)
#define FARITH3(name)                                                     \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)      \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                  \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)
#define FCMOV(name)                                                  \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc) \
{                                                                    \
    int l1;                                                          \
    TCGv tmp;                                                        \
                                                                     \
    if (unlikely(rc == 31))                                          \
        return;                                                      \
                                                                     \
    l1 = gen_new_label();                                            \
    if (ra != 31) {                                                  \
        tmp = tcg_temp_new();                                        \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                      \
    } else {                                                         \
        tmp = tcg_const_i64(0);                                      \
        gen_helper_ ## name (tmp, tmp);                              \
    }                                                                \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                    \
    /* FCMOVxx moves Fb into Fc when Fa meets the condition.  */     \
    if (rb != 31)                                                    \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                   \
    else                                                             \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                            \
    gen_set_label(l1);                                               \
    tcg_temp_free(tmp);                                              \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            /* Guard on the byte offset, not the raw literal: a shift
               count of 64 would be undefined.  */
            if ((lit & 7) != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            /* Reduce the count mod 64 so a byte offset of 0 shifts by 0
               rather than by an undefined 64.  */
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
                                            int islit, uint8_t lit)   \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(zap)
ARITH3(zapnot)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;
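    /* Return-value convention, inferred from the caller
     * (gen_intermediate_code_internal): 0 = continue with the next insn,
     * 1 = direct branch (cpu_pc already updated), 2 = stop translation
     * and let the caller store ctx->pc, 3 = exception raised (cpu_pc
     * already updated).  */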
    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
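    /* Operate-format reference: opcode is insn<31:26>, ra insn<25:21>,
     * rb insn<20:16>, the function code insn<11:5> and rc insn<4:0>.
     * When the literal bit insn<12> is set, insn<20:13> is an 8-bit
     * zero-extended literal that replaces rb.  */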
    ret = 0;
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
              opc, ra, rb, rc, disp16);
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
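    /* LDA/LDAH perform no memory access; compilers typically build
     * 32-bit constants with the pair, e.g.
     *     ldah $1, hi($31)    # $1 = hi << 16
     *     lda  $1, lo($1)     # $1 += sign-extended lo
     */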
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                case IMPLVER_21164:
                case IMPLVER_21264:
                case IMPLVER_21364:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
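        /* AMASK clears the bits corresponding to implemented architecture
         * extensions in the operand; IMPLVER_2106x (EV4/EV5-class) chips
         * predate AMASK and simply return the operand, as above.  */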
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            /* Longword extract: zero-extend 32 bits, not 16.  */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
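    /* The (insn << 51) >> 51 shift pair above sign-extends the 13-bit
       displacement field insn<12:0>.  */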
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                /* Quadword store, so stq_raw rather than stl_raw.  */
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (gen_opc_ptr >= gen_opc_end)
            break;
        if (num_insns >= max_insns)
            break;
        if (singlestep) {
            break;
        }
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
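    /* (env->ps >> 3) & 3 is what gen_intermediate_code_internal uses as
       mem_idx, so the bit set above selects the user-mode MMU index for
       user-only builds.  */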
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}