/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"
/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
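/* Each name is written NUL-terminated into a slice of this buffer:
   "ir0".."ir9" take 4 bytes, "ir10".."ir30" take 5, and the "fir"
   names take one byte more each, which is exactly the size below. */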
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];

#include "gen-icount.h"
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}
static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
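
/* Load-locked / store-conditional: LDx_L records the locked address in
   cpu_lock; the gen_qemu_stx_c helpers below test it.  This only tracks
   the address of the last LDx_L on this virtual CPU, a simplification
   that is sufficient for a single emulated processor. */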
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
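
/* The store-conditional above succeeds (writes 1 back into t0) only while
   the target address still equals cpu_lock, and in either case clears the
   lock by setting it to -1, an address no LDx_L can produce. */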
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
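
/* ctx->pc has already been advanced past the branch insn, so the taken
   target computed above is "updated PC + 4*disp", matching the Alpha
   definition of a branch displacement (a count of longword insns). */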
static inline void gen_fbcond(DisasContext *ctx, int opc, int ra,
                              int32_t disp21)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
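
/* Callers pass the *inverted* condition: the brcond skips the move when
   the architectural test fails, e.g. CMOVEQ passes TCG_COND_NE below. */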
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)
#define FARITH3(name)                                                     \
static inline void glue(gen_f, name)(int ra, int rb, int rc)              \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                  \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)
#define FCMOV(name)                                                 \
static inline void glue(gen_f, name)(int ra, int rb, int rc)        \
{                                                                   \
    int l1;                                                         \
    TCGv tmp;                                                       \
                                                                    \
    if (unlikely(rc == 31))                                         \
        return;                                                     \
                                                                    \
    l1 = gen_new_label();                                           \
    if (ra != 31) {                                                 \
        tmp = tcg_temp_new();                                       \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                     \
    } else {                                                        \
        tmp = tcg_const_i64(0);                                     \
        gen_helper_ ## name (tmp, tmp);                             \
    }                                                               \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                   \
    /* FCMOVxx moves Fb, not Fa, into Fc when the test passes.  */  \
    if (rb != 31)                                                   \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                  \
    else                                                            \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                           \
    gen_set_label(l1);                                              \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)
/* EXTWH, EXTLH, EXTQH */
static inline void gen_ext_h(void(*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                             int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1;
            tmp1 = tcg_temp_new();

            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);

            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
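
/* The neg-and-mask sequence above computes (64 - 8 * (rb & 7)) & 63
   without a branch; a literal byte offset of 0 is special-cased earlier
   because it would otherwise request an invalid 64-bit shift. */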
/* EXTBL, EXTWL, EXTLL, EXTQL */
static inline void gen_ext_l(void(*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                             int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(zap)
ARITH3(zapnot)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
                           uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
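
/* Return values from translate_one: 0 = continue translation, 1 = the
   insn wrote cpu_pc itself (branches and jumps), 2 = end the TB with the
   PC at the next insn (barriers, PALcode register writes), 3 = an
   exception was raised. */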
static inline int translate_one(DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;
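
    /* All Alpha insns are 32 bits wide, with the major opcode in bits
       31..26.  Memory format supplies ra/rb and a 16-bit displacement,
       branch format a 21-bit displacement, and operate format either rb
       or an 8-bit literal (bit 12 set) plus a function code in 11..5. */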
    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
              opc, ra, rb, rc, disp16);
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                case IMPLVER_21164:
                case IMPLVER_21264:
                case IMPLVER_21364:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb) {
                    /* FMOV; F31 has no backing register and reads as zero */
                    if (ra == 31)
                        tcg_gen_movi_i64(cpu_fir[rc], 0);
                    else
                        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                } else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* These four jumps differ only in their branch-prediction hint. */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
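    /* FBxx are branch-format insns, so like BR and the Bxx family they
       take the 21-bit branch displacement, not the 16-bit memory-format
       field. */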
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
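
/* Translate a block: decode until an insn ends the TB (ret != 0), a page
   boundary is crossed, the op buffer fills up, or single-stepping/icount
   forces an early stop. */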
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}