/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

/* #define DO_SINGLE_STEP */
#define ALPHA_DEBUG_DISAS
/* #define DO_TB_FLUSH */

typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];

#include "gen-icount.h"
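
/* Register the fixed TCG globals: the integer and FP register files, the
   pc and the lock address used by load-locked/store-conditional.
   cpu_reg_names is sized exactly for "ir0".."ir30" plus "fir0".."fir30".  */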
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
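
/* Raise an exception: synchronize the guest pc with the translation
   position, then let the exception helper do the actual work.  */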
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
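
/* Floating-point loads.  The F, G and S memory formats differ from the
   64-bit register representation, so each load converts the fetched bits
   through a memory_to_* helper before writing the register.  */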
static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}
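
/* Load-locked: record the locked address in cpu_lock, then do a normal
   load; the matching store-conditional checks that address.  */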
static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
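
/* Common code for the integer and FP load instructions: compute the
   effective address rb + disp16 ("clear" masks out the low three bits
   for LDQ_U) and load into ra.  A load into R31/F31 is a no-op.  */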
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}

static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}
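
/* Store-conditional: store and return 1 in ra if the address still
   matches the one saved by the last load-locked, else return 0; the
   lock is invalidated either way.  Only the address is checked, not
   intervening stores.  */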
static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
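
/* Common code for the store instructions.  "local" must be set for the
   store-conditional forms, whose callbacks emit branches: the address
   and the zero source then need local temps that survive the brcond.
   A store from R31/F31 stores zero.  */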
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}
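
/* Conditional branch: compare ra (or its low bit, for BLBC/BLBS, when
   "mask" is set) against zero and set cpu_pc to either the fall-through
   pc or pc + 21-bit branch displacement * 4.  */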
static always_inline void gen_bcond (DisasContext *ctx,
                                     TCGCond cond,
                                     int ra, int32_t disp21, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}

static always_inline void gen_fbcond (DisasContext *ctx, int opc,
                                      int ra, int32_t disp21)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}
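
/* Integer conditional move: copy rb (or the literal) into rc unless the
   inverted condition on ra branches around the move.  */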
static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
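
/* Generate helper calls for the unary and binary FP operations.  F31
   reads as zero; results targeted at F31 are not computed at all.  */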
#define FARITH2(name)                                       \
static always_inline void glue(gen_f, name)(int rb, int rc) \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)

#define FARITH3(name)                                                    \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc)     \
{                                                                        \
    if (unlikely(rc == 31))                                              \
        return;                                                          \
                                                                         \
    if (ra != 31) {                                                      \
        if (rb != 31)                                                    \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]); \
        else {                                                           \
            TCGv tmp = tcg_const_i64(0);                                 \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);         \
            tcg_temp_free(tmp);                                          \
        }                                                                \
    } else {                                                             \
        TCGv tmp = tcg_const_i64(0);                                     \
        if (rb != 31)                                                    \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);         \
        else                                                             \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                 \
        tcg_temp_free(tmp);                                              \
    }                                                                    \
}
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)

/* FP conditional move: rc gets rb when the comparison helper applied
   to ra returns true.  */
#define FCMOV(name)                                                  \
static always_inline void glue(gen_f, name) (int ra, int rb, int rc) \
{                                                                    \
    int l1;                                                          \
    TCGv tmp;                                                        \
                                                                     \
    if (unlikely(rc == 31))                                          \
        return;                                                      \
                                                                     \
    l1 = gen_new_label();                                            \
    if (ra != 31) {                                                  \
        tmp = tcg_temp_new();                                        \
        gen_helper_ ## name (tmp, cpu_fir[ra]);                      \
    } else {                                                         \
        tmp = tcg_const_i64(0);                                      \
        gen_helper_ ## name (tmp, tmp);                              \
    }                                                                \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);                    \
    if (rb != 31)                                                    \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);                   \
    else                                                             \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                            \
    gen_set_label(l1);                                               \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)

/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new();
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static always_inline void glue(gen_, name) (int ra, int rb, int rc,   \
                                            int islit, uint8_t lit)   \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(zap)
ARITH3(zapnot)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)
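
/* Integer compare: branch on the condition between ra and rb (or the
   literal) to set rc to 0 or 1.  */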
static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
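
/* Translate one instruction.  Return 0 to continue with the next
   instruction, 1 if cpu_pc has already been written (branches), 2 to
   end the TB with cpu_pc still to be updated, or 3 if an exception
   was generated.  */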
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
                else
                    gen_helper_amask(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                gen_helper_load_implver(cpu_ir[rc]);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* f11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
#if 0
            ret = 2;
#else
            goto invalid_opc;
#endif
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch */
                gen_helper_ldl_kernel(cpu_ir[ra], addr);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch */
                gen_helper_ldq_kernel(cpu_ir[ra], addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xB:
                /* Quadword virtual access with protection check */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->mem_idx);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        ret = 2;
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
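
/* Translate a block of instructions until one of them ends the TB, the
   instruction budget is exhausted or a page boundary is crossed.  When
   search_pc is set, also record the guest pc and instruction count of
   each generated op so the pc can be recovered after an exception.  */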
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = ctx.pc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            num_insns >= max_insns) {
            break;
        }

        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    gen_helper_tb_flush();
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
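
/* Create and initialize a CPU.  The implementation version and initial
   PS are hardcoded (see the XXX notes below) and the IPRs are given
   fixed reset values.  */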
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}