target-alpha: Remove bogus DO_TB_FLUSH code from translator.
[qemu/aliguori-queue.git] / target-alpha / translate.c
/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"

#undef ALPHA_DEBUG_DISAS

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    CPUAlphaState *env;
    uint32_t amask;
};
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock;

/* register names */
static char cpu_reg_names[10*4+21*5 + 10*5+21*6];

#include "gen-icount.h"

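/* Lazily allocate the TCG globals that back the architectural state:
   the 31 integer and 31 FP registers (index 31 is the hardwired zero
   register and is handled specially throughout the translator), the
   PC, and the lock address used by the LDx_L/STx_C pair.  */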
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                           offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new_i64(TCG_AREG0,
                                    offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUState, lock), "lock");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}

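/* Raise an exception from generated code.  The PC must be written back
   to the CPU state first, so that the exception handler sees the
   address of the faulting instruction rather than a stale value.  */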
static inline void gen_excp(DisasContext *ctx, int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static inline void gen_invalid(DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}

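/* Loads of VAX F/G-float and IEEE S-float values are not plain copies:
   a helper expands the memory representation into the 64-bit register
   format on the way in.  T-float and quadword loads can use
   tcg_gen_qemu_ld64 directly.  The LDx_L variants additionally record
   the load address in cpu_lock for a later store-conditional.  */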
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld64(tmp, t1, flags);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_trunc_i64_i32(tmp32, tmp);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}

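/* Common code for all loads.  A load into register 31 is a no-op (the
   architecture defines it as a prefetch hint); rb == 31 means a zero
   base, so the displacement alone forms the address; "clear" implements
   the LDQ_U-style masking of the low three address bits.  */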
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, int fp,
                                int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    TCGv tmp = tcg_temp_new();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_extu_i32_i64(tmp, tmp32);
    tcg_gen_qemu_st32(tmp, t1, flags);
    tcg_temp_free(tmp);
    tcg_temp_free_i32(tmp32);
}

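/* Store-conditional emulation: succeed (write 1 back to Ra) only if
   the store address still matches the address recorded in cpu_lock by
   the last LDx_L, then invalidate the lock.  This is a simplification
   of the architected lock_flag/locked_physical_address pair and is not
   multiprocessor-safe.  */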
static inline void gen_qemu_stl_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static inline void gen_qemu_stq_c(TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 0);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, int fp,
                                 int clear, int local)
{
    TCGv addr;
    if (local)
        addr = tcg_temp_local_new();
    else
        addr = tcg_temp_new();
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        if (clear)
            disp16 &= ~0x7;
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero;
        if (local)
            zero = tcg_const_local_i64(0);
        else
            zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
        tcg_temp_free(zero);
    }
    tcg_temp_free(addr);
}

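/* Conditional branch on an integer register.  Branch displacements are
   counted in longwords from the updated PC, hence "disp << 2" when
   computing the target.  "mask" selects the BLBC/BLBS form, which
   tests only the low bit of Ra.  */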
static inline void gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp << 2));
    gen_set_label(l2);
}

static inline void gen_fbcond(DisasContext *ctx, int opc, int ra,
                              int32_t disp21)
{
    int l1, l2;
    TCGv tmp;
    TCGv src;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new();
        src = cpu_fir[ra];
    } else {
        tmp = tcg_const_i64(0);
        src = tmp;
    }
    switch (opc) {
    case 0x31: /* FBEQ */
        gen_helper_cmpfeq(tmp, src);
        break;
    case 0x32: /* FBLT */
        gen_helper_cmpflt(tmp, src);
        break;
    case 0x33: /* FBLE */
        gen_helper_cmpfle(tmp, src);
        break;
    case 0x35: /* FBNE */
        gen_helper_cmpfne(tmp, src);
        break;
    case 0x36: /* FBGE */
        gen_helper_cmpfge(tmp, src);
        break;
    case 0x37: /* FBGT */
        gen_helper_cmpfgt(tmp, src);
        break;
    default:
        abort();
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
    gen_set_label(l2);
}

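/* CMOVxx: copy Rb (or the literal) into Rc unless the inverted
   condition holds on Ra.  The caller passes the *inverse* of the
   architectural condition, so the move is skipped by branching over
   it when the inverse test succeeds.  */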
static inline void gen_cmov(TCGCond inv_cond, int ra, int rb, int rc,
                            int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize.  */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}

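/* Boilerplate generators for the one- and two-source FP helpers.
   Reading $f31 always yields +0.0, so a zero constant is substituted
   whenever register 31 appears as a source; a write to $f31 is
   discarded by the early return.  */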
#define FARITH2(name)                                       \
static inline void glue(gen_f, name)(int rb, int rc)        \
{                                                           \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    if (rb != 31)                                           \
        gen_helper_ ## name (cpu_fir[rc], cpu_fir[rb]);     \
    else {                                                  \
        TCGv tmp = tcg_const_i64(0);                        \
        gen_helper_ ## name (cpu_fir[rc], tmp);             \
        tcg_temp_free(tmp);                                 \
    }                                                       \
}
FARITH2(sqrts)
FARITH2(sqrtf)
FARITH2(sqrtg)
FARITH2(sqrtt)
FARITH2(cvtgf)
FARITH2(cvtgq)
FARITH2(cvtqf)
FARITH2(cvtqg)
FARITH2(cvtst)
FARITH2(cvtts)
FARITH2(cvttq)
FARITH2(cvtqs)
FARITH2(cvtqt)
FARITH2(cvtlq)
FARITH2(cvtql)
FARITH2(cvtqlv)
FARITH2(cvtqlsv)

#define FARITH3(name)                                                   \
static inline void glue(gen_f, name)(int ra, int rb, int rc)            \
{                                                                       \
    if (unlikely(rc == 31))                                             \
        return;                                                         \
                                                                        \
    if (ra != 31) {                                                     \
        if (rb != 31)                                                   \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);\
        else {                                                          \
            TCGv tmp = tcg_const_i64(0);                                \
            gen_helper_ ## name (cpu_fir[rc], cpu_fir[ra], tmp);        \
            tcg_temp_free(tmp);                                         \
        }                                                               \
    } else {                                                            \
        TCGv tmp = tcg_const_i64(0);                                    \
        if (rb != 31)                                                   \
            gen_helper_ ## name (cpu_fir[rc], tmp, cpu_fir[rb]);        \
        else                                                            \
            gen_helper_ ## name (cpu_fir[rc], tmp, tmp);                \
        tcg_temp_free(tmp);                                             \
    }                                                                   \
}
FARITH3(addf)
FARITH3(subf)
FARITH3(mulf)
FARITH3(divf)
FARITH3(addg)
FARITH3(subg)
FARITH3(mulg)
FARITH3(divg)
FARITH3(cmpgeq)
FARITH3(cmpglt)
FARITH3(cmpgle)
FARITH3(adds)
FARITH3(subs)
FARITH3(muls)
FARITH3(divs)
FARITH3(addt)
FARITH3(subt)
FARITH3(mult)
FARITH3(divt)
FARITH3(cmptun)
FARITH3(cmpteq)
FARITH3(cmptlt)
FARITH3(cmptle)
FARITH3(cpys)
FARITH3(cpysn)
FARITH3(cpyse)

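/* FCMOVxx: conditionally copy Fb into Fc, with the condition evaluated
   on Fa by the corresponding cmpf* helper.  */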
#define FCMOV(name)                                         \
static inline void glue(gen_f, name)(int ra, int rb, int rc)\
{                                                           \
    int l1;                                                 \
    TCGv tmp;                                               \
                                                            \
    if (unlikely(rc == 31))                                 \
        return;                                             \
                                                            \
    l1 = gen_new_label();                                   \
    if (ra != 31) {                                         \
        tmp = tcg_temp_new();                               \
        gen_helper_ ## name (tmp, cpu_fir[ra]);             \
    } else {                                                \
        tmp = tcg_const_i64(0);                             \
        gen_helper_ ## name (tmp, tmp);                     \
    }                                                       \
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);           \
    if (rb != 31)                                           \
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[rb]);          \
    else                                                    \
        tcg_gen_movi_i64(cpu_fir[rc], 0);                   \
    gen_set_label(l1);                                      \
    tcg_temp_free(tmp);                                     \
}
FCMOV(cmpfeq)
FCMOV(cmpfne)
FCMOV(cmpflt)
FCMOV(cmpfge)
FCMOV(cmpfle)
FCMOV(cmpfgt)

/* EXTWH, EXTLH, EXTQH */
static inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                             int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1;
            tmp1 = tcg_temp_new();

            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tcg_gen_neg_i64(tmp1, tmp1);
            tcg_gen_andi_i64(tmp1, tmp1, 0x3f);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);

            tcg_temp_free(tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                             int ra, int rb, int rc, int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

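/* Boilerplate to call the two-source integer helpers (byte
   manipulation, the *V overflow-checked arithmetic, UMULH),
   substituting a zero constant for register 31 as a source and a
   constant for the 8-bit literal when islit is set.  */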
/* Code to call arith3 helpers */
#define ARITH3(name)                                                  \
static inline void glue(gen_, name)(int ra, int rb, int rc, int islit,\
                                    uint8_t lit)                      \
{                                                                     \
    if (unlikely(rc == 31))                                           \
        return;                                                       \
                                                                      \
    if (ra != 31) {                                                   \
        if (islit) {                                                  \
            TCGv tmp = tcg_const_i64(lit);                            \
            gen_helper_ ## name(cpu_ir[rc], cpu_ir[ra], tmp);         \
            tcg_temp_free(tmp);                                       \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]); \
    } else {                                                          \
        TCGv tmp1 = tcg_const_i64(0);                                 \
        if (islit) {                                                  \
            TCGv tmp2 = tcg_const_i64(lit);                           \
            gen_helper_ ## name (cpu_ir[rc], tmp1, tmp2);             \
            tcg_temp_free(tmp2);                                      \
        } else                                                        \
            gen_helper_ ## name (cpu_ir[rc], tmp1, cpu_ir[rb]);       \
        tcg_temp_free(tmp1);                                          \
    }                                                                 \
}
ARITH3(cmpbge)
ARITH3(addlv)
ARITH3(sublv)
ARITH3(addqv)
ARITH3(subqv)
ARITH3(mskbl)
ARITH3(insbl)
ARITH3(mskwl)
ARITH3(inswl)
ARITH3(mskll)
ARITH3(insll)
ARITH3(zap)
ARITH3(zapnot)
ARITH3(mskql)
ARITH3(insql)
ARITH3(mskwh)
ARITH3(inswh)
ARITH3(msklh)
ARITH3(inslh)
ARITH3(mskqh)
ARITH3(insqh)
ARITH3(umulh)
ARITH3(mullv)
ARITH3(mulqv)

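/* CMPxx: set Rc to 1 if the comparison of Ra against Rb (or the
   literal) holds, else 0, using a branch over the two constant
   stores.  */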
static inline void gen_cmp(TCGCond cond, int ra, int rb, int rc, int islit,
                           uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new();
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}

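/* Translate one instruction and return how to continue the TB:
 *   0 - fall through to the next instruction
 *   1 - branch: cpu_pc has already been written
 *   2 - stop translation (barrier, PALcode register update, ...)
 *   3 - an exception helper was emitted; cpu_pc is already correct
 *
 * Alpha instructions are 32 bits wide with the major opcode in bits
 * <31:26>.  In the operate format, bit 12 selects between a register
 * operand and an 8-bit zero-extended literal:
 *
 *   31   26 25 21 20 16 15 13 12 11  5 4  0
 *   +------+-----+-----+----+---+-----+----+
 *   | opc  | Ra  | Rb  |SBZ | 0 | fn7 | Rc |   register form
 *   | opc  | Ra  |   lit    | 1 | fn7 | Rc |   literal form
 *   +------+-----+-----+----+---+-----+----+
 */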
static inline int translate_one(DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
    LOG_DISAS("opc %02x ra %d rb %d rc %d disp16 %04x\n",
              opc, ra, rb, rc, disp16);
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x3F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x3F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_cmpbge(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new();
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_addlv(ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_sublv(ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_addqv(ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_subqv(ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else
                        tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                switch (ctx->env->implver) {
                case IMPLVER_2106x:
                    /* EV4, EV45, LCA, LCA45 & EV5 */
                    break;
                case IMPLVER_21164:
                case IMPLVER_21264:
                case IMPLVER_21364:
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[rc],
                                     ~(uint64_t)ctx->amask);
                    break;
                }
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_movi_i64(cpu_ir[rc], ctx->env->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_mskbl(ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_insbl(ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_mskwl(ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_inswl(ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_mskll(ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_insll(ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_zap(ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_zapnot(ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_mskql(ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_insql(ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new();
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_mskwh(ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_inswh(ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_msklh(ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_inslh(ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_mskqh(ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_insqh(ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_umulh(ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_mullv(ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_mulqv(ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* f11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_s(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtf(rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrts(rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv_i32 tmp = tcg_temp_new_i32();
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    gen_helper_memory_to_f(cpu_fir[rc], tmp);
                    tcg_temp_free_i32(tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtg(rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fsqrtt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_faddf(ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_fsubf(ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_fmulf(ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_fdivf(ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
#if 0 // TODO
            gen_fcvtdg(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x20:
            /* ADDG */
            gen_faddg(ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_fsubg(ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_fmulg(ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_fdivg(ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_fcmpgeq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_fcmpglt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_fcmpgle(ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_fcvtgf(rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
#if 0 // TODO
            gen_fcvtgd(rb, rc);
#else
            goto invalid_opc;
#endif
            break;
        case 0x2F:
            /* CVTGQ */
            gen_fcvtgq(rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_fcvtqf(rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_fcvtqg(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_fadds(ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_fsubs(ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_fmuls(ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_fdivs(ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_faddt(ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_fsubt(ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_fmult(ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_fdivt(ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_fcmptun(ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_fcmpteq(ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_fcmptlt(ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_fcmptle(ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_fcvtst(rb, rc);
            } else {
                /* CVTTS */
                gen_fcvtts(rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_fcvttq(rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_fcvtqs(rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_fcvtqt(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            gen_fcvtlq(rb, rc);
            break;
        case 0x020:
            if (likely(rc != 31)) {
                if (ra == rb && ra != 31)
                    /* FMOV */
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
                else
                    /* CPYS */
                    gen_fcpys(ra, rb, rc);
            }
            break;
        case 0x021:
            /* CPYSN */
            gen_fcpysn(ra, rb, rc);
            break;
        case 0x022:
            /* CPYSE */
            gen_fcpyse(ra, rb, rc);
            break;
        case 0x024:
            /* MT_FPCR */
            if (likely(ra != 31))
                gen_helper_store_fpcr(cpu_fir[ra]);
            else {
                TCGv tmp = tcg_const_i64(0);
                gen_helper_store_fpcr(tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x025:
            /* MF_FPCR */
            if (likely(ra != 31))
                gen_helper_load_fpcr(cpu_fir[ra]);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmpfeq(ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmpfne(ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmpflt(ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmpfge(ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmpfle(ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmpfgt(ra, rb, rc);
            break;
        case 0x030:
            /* CVTQL */
            gen_fcvtql(rb, rc);
            break;
        case 0x130:
            /* CVTQL/V */
            gen_fcvtqlv(rb, rc);
            break;
        case 0x530:
            /* CVTQL/SV */
            gen_fcvtqlsv(rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                gen_helper_load_pcc(cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                gen_helper_rc(cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            if (ra != 31)
                gen_helper_rs(cpu_ir[ra]);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv tmp = tcg_const_i32(insn & 0xFF);
            gen_helper_mfpr(cpu_ir[ra], tmp, cpu_ir[ra]);
            tcg_temp_free(tmp);
        }
        break;
#endif
    case 0x1A:
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        /* Those four jumps only differ by the branch prediction hint */
        switch (fn2) {
        case 0x0:
            /* JMP */
            break;
        case 0x1:
            /* JSR */
            break;
        case 0x2:
            /* RET */
            break;
        case 0x3:
            /* JSR_COROUTINE */
            break;
        }
        ret = 1;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31) {
            TCGv addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_raw(cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_raw(cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, 0);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, 0);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldl_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_ldq_raw(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks (hw_ldl/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldl_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks (hw_ldq/wa)
                 */
                gen_helper_set_alt_mode();
                gen_helper_ldq_data(cpu_ir[ra], addr);
                gen_helper_restore_mode();
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    gen_helper_ctpop(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    gen_helper_ctlz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    gen_helper_cttz(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x35:
            /* UNPKWL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
            break;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv_i32 tmp1 = tcg_temp_new_i32();
                if (ra != 31)
                    gen_helper_s_to_memory(tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    gen_helper_s_to_memory(tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free_i32(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                gen_helper_mtpr(tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                gen_helper_mtpr(tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
            ret = 2;
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb == 31) {
            /* "Old" alpha */
            gen_helper_hw_rei();
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new();
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            gen_helper_hw_ret(tmp);
            tcg_temp_free(tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        else {
            TCGv addr, val;
            addr = tcg_temp_new();
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new();
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_raw(val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_raw(val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_raw(val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_raw(val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stl_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                gen_helper_set_alt_mode();
                gen_helper_st_virt_to_phys(addr, addr);
                gen_helper_stq_raw(val, addr);
                gen_helper_restore_mode();
                break;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            if (ra == 31)
                tcg_temp_free(val);
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31: /* FBEQ */
    case 0x32: /* FBLT */
    case 0x33: /* FBLE */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35: /* FBNE */
    case 0x36: /* FBGE */
    case 0x37: /* FBGT */
        gen_fbcond(ctx, opc, ra, disp21);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        ret = 1;
        break;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}

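/* Translate a basic block: decode instructions from ctx.pc until
   translate_one requests a stop, a page boundary or the TCG opcode
   buffer limit is reached, or the icount budget is exhausted.  With
   search_pc set, also record the guest PC for every generated op so
   that a host fault address can be mapped back to a guest PC.  */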
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
    ctx.env = env;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("pc " TARGET_FMT_lx " mem_idx %d\n",
                  ctx.pc, ctx.mem_idx);
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        LOG_DISAS("opcode %08x %d\n", insn, insn_count);
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        if (ret != 0)
            break;
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (env->singlestep_enabled) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;

        if (gen_opc_ptr >= gen_opc_end)
            break;

        if (num_insns >= max_insns)
            break;

        if (singlestep) {
            break;
        }
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    qemu_init_vcpu(env);
    return env;
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}