/*
 * Alpha emulation cpu translation for qemu.
 *
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */

#ifdef ALPHA_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];

#include "gen-icount.h"
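
/* Lazily create the TCG globals that back the guest register file.
   Each integer register ir0..ir30 and FP register fir0..fir30 is exposed
   to TCG as a 64-bit global living at a fixed offset in CPUState;
   register 31 is the architectural zero register and is special-cased by
   the generators below instead of getting a global of its own.  The
   cpu_reg_names buffer is sized for exactly these names: ten "irN" plus
   twenty-one "irNN" strings (4 and 5 bytes with NUL), and likewise
   "firN"/"firNN" (5 and 6 bytes).  */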
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
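
/* Memory-format FP loads.  The VAX F/G and IEEE S formats are not kept in
   memory representation in cpu_fir[]; the memory_to_f/g/s helpers
   rearrange the loaded bits into the common register format, so each of
   these loads goes through a scratch temporary rather than straight into
   the destination register.  */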
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
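
/* Store-conditional.  gen_qemu_ldl_l/ldq_l above record the locked
   address in cpu_lock; the conditional store succeeds (writing 1 back
   into the source register) only when the store address still matches
   cpu_lock, and always invalidates the lock afterwards by setting it to
   -1.  Note this compares addresses only, not an exclusive monitor, which
   is a simplification of the architectural LDx_L/STx_C semantics.  Since
   these sequences contain branches, gen_store_mem must be called with
   local=1 so the operands survive across the labels.  */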
static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
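
/* Conditional branches are generated without goto_tb chaining: both the
   taken and fall-through paths explicitly store their target into cpu_pc
   and the translation block then exits (the callers return ret == 1), so
   the main loop resumes at whichever pc was written.  */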
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}
static inline void gen_fbcond(DisasContext *ctx, int opc, int ra,
                              int32_t disp21)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}
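
/* CMOV is implemented backwards: the caller passes the *inverted*
   condition and we branch over the move when the inversion holds, so
   e.g. CMOVEQ is generated with TCG_COND_NE (see the opcode 0x11
   cases in translate_one).  */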
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)
#define FARITH3(name)                                                     \
static inline void glue(gen_f, name)(int ra, int rb, int rc)              \
{                                                                         \
    if (unlikely(rc == 31))                                               \
        return;                                                           \
                                                                          \
    if (ra != 31) {                                                       \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);  \
        else {                                                            \
            TCGv tmp = tcg_const_i64(0);                                  \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);          \
            tcg_temp_free(tmp);                                           \
        }                                                                 \
    } else {                                                              \
        TCGv tmp = tcg_const_i64(0);                                      \
        if (rb != 31)                                                     \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);          \
        else                                                              \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                  \
        tcg_temp_free(tmp);                                               \
    }                                                                     \
}
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)
#define FCMOV(name)                                          \
static inline void glue(gen_f, name)(int ra, int rb, int rc) \
{                                                            \
    int l1;                                                  \
    TCGv tmp;                                                \
                                                             \
    if (unlikely(rc == 31))                                  \
        return;                                              \
                                                             \
    l1 = gen_new_label();                                    \
    if (ra != 31) {                                          \
        tmp = tcg_temp_new();                                \
        gen_helper_ ## name (tmp, cpu_fir[ra]);              \
    } else {                                                 \
        tmp = tcg_const_i64(0);                              \
        gen_helper_ ## name (tmp, tmp);                      \
    }                                                        \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);            \
    tcg_temp_free(tmp);                                      \
    if (rb != 31)                                            \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);           \
    else                                                     \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                    \
    gen_set_label(l1);                                       \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)
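
/* Byte-manipulation extracts.  EXTxL shifts the selected bytes down to
   bit 0 (shift right by 8 * (rb & 7)); EXTxH shifts them up from the top
   of the register (shift left by 64 - 8 * (rb & 7)).  The optional
   tcg_gen_ext_i64 callback then truncates the result to the operand
   width; it is NULL for the quadword forms, which need no truncation.  */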
/* EXTWH, EXTLH, EXTQH */
static inline void gen_ext_h(void(*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                             int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* EXTBL, EXTWL, EXTLL, EXTQL */
static inline void gen_ext_l(void(*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                             int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(zap)
ARITH3(zapnot)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
                           uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
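
/* Translate a single instruction.  The return value steers the main
   translation loop (inferred from its uses in
   gen_intermediate_code_internal below):
     0 - fall through, keep translating the block
     1 - branch or jump: cpu_pc has already been written
     2 - stop translation; cpu_pc still needs to be stored
     3 - an exception (or PAL call) was raised; cpu_pc is already set  */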
static inline int translate_one(DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
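    /* All Alpha instructions are 32 bits wide and share a 6-bit major
     * opcode in insn<31:26>.  The fields extracted below cover the four
     * basic layouts:
     *
     *   Memory:   opcode | ra<25:21> | rb<20:16> | disp16<15:0>
     *   Branch:   opcode | ra<25:21> | disp21<20:0>
     *   Operate:  opcode | ra | rb | sbz<15:13> | 0 | fn7<11:5> | rc<4:0>
     *             opcode | ra | lit<20:13>      | 1 | fn7<11:5> | rc<4:0>
     *   PALcode:  opcode | palcode<25:0>
     *
     * Bit 12 (islit) selects between the register and 8-bit literal forms
     * of operate instructions; FP operates use the wider fn11 function
     * field instead of fn7.  */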
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
              opc, ra, rb, rc, disp16);
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                case IMPLVER_21164:
                case IMPLVER_21264:
                case IMPLVER_21364:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* f11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
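
/* Translate a basic block: decode guest instructions starting at tb->pc
   until translate_one requests a stop, a page boundary is crossed, the
   op buffer fills up, or the icount/singlestep limits hit.  When
   search_pc is set, the gen_opc_* side tables are additionally filled so
   a host pc can later be mapped back to a guest pc (see gen_pc_load at
   the end of this file).  */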
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (gen_opc_ptr >= gen_opc_end)
            break;
        if (num_insns >= max_insns)
            break;
        if (singlestep) {
            break;
        }
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}