[qemu/mini2440.git] / target-sh4 / translate.c
/*
 * SH4 translation
 *
 * Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>
#include <assert.h>

#define DEBUG_DISAS
#define SH4_DEBUG_DISAS
//#define SH4_SINGLE_STEP

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "tcg-op.h"
#include "qemu-common.h"

#include "helper.h"
#define GEN_HELPER 1
#include "helper.h"
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc;
    uint32_t sr;
    uint32_t fpscr;
    uint16_t opcode;
    uint32_t flags;
    int bstate;
    int memidx;
    uint32_t delayed_pc;
    int singlestep_enabled;
    uint32_t features;
} DisasContext;

#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->sr & SR_MD))
#endif
enum {
    BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
                      * exception condition */
    BS_STOP     = 1, /* We want to stop translation for any reason */
    BS_BRANCH   = 2, /* We reached a branch condition */
    BS_EXCP     = 3, /* We reached an exception condition */
};
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_gregs[24];
static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul;
static TCGv cpu_fregs[32];

/* internal register indexes */
static TCGv cpu_flags, cpu_delayed_pc;

#include "gen-icount.h"
static void sh4_translate_init(void)
{
    int i;
    static int done_init = 0;
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };
    static const char * const fregnames[32] = {
        "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
        "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
        "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
        "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
        "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
        "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
    };

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    for (i = 0; i < 24; i++)
        cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);

    cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUState, pc), "PC");
    cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUState, sr), "SR");
    cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUState, ssr), "SSR");
    cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUState, spc), "SPC");
    cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUState, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUState, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUState, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUState, dbr), "DBR");
    cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUState, mach), "MACH");
    cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUState, macl), "MACL");
    cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUState, pr), "PR");
    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUState, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, delayed_pc),
                                            "_delayed_pc_");

    for (i = 0; i < 32; i++)
        cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                              offsetof(CPUState, fregs[i]),
                                              fregnames[i]);

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
void cpu_dump_state(CPUState * env, FILE * f,
                    int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
                    int flags)
{
    int i;
    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                env->pc, env->sr, env->pr, env->fpscr);
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                env->spc, env->ssr, env->gbr, env->vbr);
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
    for (i = 0; i < 24; i += 4) {
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    if (env->flags & DELAY_SLOT) {
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    }
}
static void cpu_sh4_reset(CPUSH4State * env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }

#if defined(CONFIG_USER_ONLY)
    env->sr = 0;
#else
    env->sr = SR_MD | SR_RB | SR_BL | SR_I3 | SR_I2 | SR_I1 | SR_I0;
#endif
    env->vbr = 0;
    env->pc = 0xA0000000;
#if defined(CONFIG_USER_ONLY)
    env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
    set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
#else
    env->fpscr = 0x00040001; /* CPU reset value according to SH4 manual */
    set_float_rounding_mode(float_round_to_zero, &env->fp_status);
#endif
    env->mmucr = 0;
}
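/*
 * Why the reset pc above is 0xA0000000: on SH-4 that address sits in the P2
 * segment, the uncached, untranslated mirror of physical memory where the
 * reset vector lives.  A minimal sketch of the P1/P2-to-physical mapping
 * this relies on (illustrative only, not used by this file; the helper name
 * is made up here):
 */
static inline uint32_t example_p1_p2_to_phys(uint32_t vaddr)
{
    /* Dropping the segment bits maps P1/P2 addresses onto physical 0. */
    return vaddr & 0x1fffffff;
}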
typedef struct {
    const char *name;
    int id;
    uint32_t pvr;
    uint32_t prr;
    uint32_t cvr;
    uint32_t features;
} sh4_def_t;

static sh4_def_t sh4_defs[] = {
    {
        .name = "SH7750R",
        .id = SH_CPU_SH7750R,
        .pvr = 0x00050000,
        .prr = 0x00000100,
        .cvr = 0x00110000,
    }, {
        .name = "SH7751R",
        .id = SH_CPU_SH7751R,
        .pvr = 0x04050005,
        .prr = 0x00000113,
        .cvr = 0x00110000,	/* Neutered caches, should be 0x20480000 */
    }, {
        .name = "SH7785",
        .id = SH_CPU_SH7785,
        .pvr = 0x10300700,
        .prr = 0x00000200,
        .cvr = 0x71440211,
        .features = SH_FEATURE_SH4A,
    },
};
static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
{
    int i;

    if (strcasecmp(name, "any") == 0)
        return &sh4_defs[0];

    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
        if (strcasecmp(name, sh4_defs[i].name) == 0)
            return &sh4_defs[i];

    return NULL;
}

void sh4_cpu_list(FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
{
    int i;

    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
        (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
}

static void cpu_sh4_register(CPUSH4State *env, const sh4_def_t *def)
{
    env->pvr = def->pvr;
    env->prr = def->prr;
    env->cvr = def->cvr;
    env->id = def->id;
}
CPUSH4State *cpu_sh4_init(const char *cpu_model)
{
    CPUSH4State *env;
    const sh4_def_t *def;

    def = cpu_sh4_find_by_name(cpu_model);
    if (!def)
        return NULL;
    env = qemu_mallocz(sizeof(CPUSH4State));
    if (!env)
        return NULL;
    env->features = def->features;
    cpu_exec_init(env);
    sh4_translate_init();
    env->cpu_model_str = cpu_model;
    cpu_sh4_reset(env);
    cpu_sh4_register(env, def);
    tlb_flush(env, 1);
    return env;
}
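/*
 * Typical use of cpu_sh4_init() by board or linux-user setup code: look the
 * model up by name and bail out if it is unknown.  A minimal sketch only;
 * the caller shown here (and its error handling) is hypothetical, not taken
 * from this repository:
 */
static inline CPUSH4State *example_create_cpu(const char *model)
{
    CPUSH4State *env = cpu_sh4_init(model);

    if (!env) {
        fprintf(stderr, "Unable to find SH4 CPU definition '%s'\n", model);
        exit(1);
    }
    return env;
}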
static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        !ctx->singlestep_enabled) {
        /* Use a direct jump if in same page and singlestep not enabled */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb((long) tb + n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->singlestep_enabled)
            gen_helper_debug();
        tcg_gen_exit_tb(0);
    }
}
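/*
 * The value handed to tcg_gen_exit_tb() above is what the execution loop
 * gets back when the block finishes: the TranslationBlock pointer of the
 * block that just ran, with the goto_tb slot number (0 or 1) encoded in the
 * low bits, or 0 when no chaining is wanted.  A rough sketch of how such a
 * value is taken apart (illustrative only; the real patching happens in
 * cpu-exec, not here):
 */
static inline void example_decode_exit_tb(unsigned long next_tb)
{
    TranslationBlock *source = (TranslationBlock *)(next_tb & ~3UL);
    int slot = next_tb & 3;   /* which of the two goto_tb exits was taken */

    /* cpu-exec would link slot `slot' of `source' to the next block here. */
    (void)source;
    (void)slot;
}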
static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == (uint32_t) - 1) {
        /* Target is not statically known, it comes necessarily from a
           delayed jump as immediate jumps are conditional jumps */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        if (ctx->singlestep_enabled)
            gen_helper_debug();
        tcg_gen_exit_tb(0);
    } else {
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}
static inline void gen_branch_slot(uint32_t delayed_pc, int t)
{
    TCGv sr;
    int label = gen_new_label();
    tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_NE, sr, t ? SR_T : 0, label);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    gen_set_label(label);
}

/* Immediate conditional jump (bt or bf) */
static void gen_conditional_jump(DisasContext * ctx,
                                 target_ulong ift, target_ulong ifnott)
{
    int l1;
    TCGv sr;

    l1 = gen_new_label();
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_EQ, sr, SR_T, l1);
    gen_goto_tb(ctx, 0, ifnott);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ift);
}

/* Delayed conditional jump (bt or bf) */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    int l1;
    TCGv ds;

    l1 = gen_new_label();
    ds = tcg_temp_new();
    tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_brcondi_i32(TCG_COND_EQ, ds, DELAY_SLOT_TRUE, l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    gen_set_label(l1);
    tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
    gen_jump(ctx);
}
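/*
 * Architectural behaviour that gen_branch_slot() plus
 * gen_delayed_conditional_jump() implement for bt/s, written as plain C on
 * pc/sr values only (a reference model, not used by the translator):
 */
static inline uint32_t example_bts_target(uint32_t branch_pc, uint32_t sr,
                                          int32_t disp8)
{
    /* The delay slot at branch_pc + 2 always runs first.  Afterwards,
       control goes to the target if T was set when the branch itself was
       executed, otherwise to the instruction after the delay slot. */
    return (sr & SR_T) ? (branch_pc + 4 + disp8 * 2) : (branch_pc + 4);
}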
static inline void gen_set_t(void)
{
    tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
}

static inline void gen_clr_t(void)
{
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
}

static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
{
    int label1 = gen_new_label();
    int label2 = gen_new_label();
    tcg_gen_brcond_i32(cond, t1, t0, label1);
    gen_clr_t();
    tcg_gen_br(label2);
    gen_set_label(label1);
    gen_set_t();
    gen_set_label(label2);
}

static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
{
    int label1 = gen_new_label();
    int label2 = gen_new_label();
    tcg_gen_brcondi_i32(cond, t0, imm, label1);
    gen_clr_t();
    tcg_gen_br(label2);
    gen_set_label(label1);
    gen_set_t();
    gen_set_label(label2);
}

static inline void gen_store_flags(uint32_t flags)
{
    tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
}
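/*
 * What gen_cmp()/gen_cmp_imm() compute, expressed directly on the SR value
 * (reference model only; the translator emits the branch/label sequence
 * above instead of operating on host values):
 */
static inline uint32_t example_update_t(uint32_t sr, int cond_holds)
{
    return cond_holds ? (sr | SR_T) : (sr & ~SR_T);
}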
static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
{
    TCGv tmp = tcg_temp_new();

    p0 &= 0x1f;
    p1 &= 0x1f;

    tcg_gen_andi_i32(tmp, t1, (1 << p1));
    tcg_gen_andi_i32(t0, t0, ~(1 << p0));
    if (p0 < p1)
        tcg_gen_shri_i32(tmp, tmp, p1 - p0);
    else if (p0 > p1)
        tcg_gen_shli_i32(tmp, tmp, p0 - p1);
    tcg_gen_or_i32(t0, t0, tmp);

    tcg_temp_free(tmp);
}

static inline void gen_load_fpr64(TCGv_i64 t, int reg)
{
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
}

static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(tmp, t);
    tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
    tcg_gen_shri_i64(t, t, 32);
    tcg_gen_trunc_i64_i32(tmp, t);
    tcg_gen_mov_i32(cpu_fregs[reg], tmp);
    tcg_temp_free_i32(tmp);
}
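/*
 * Register-pair layout assumed by gen_load_fpr64()/gen_store_fpr64(): a
 * double-precision DRn lives in two consecutive singles, with fregs[n]
 * holding the high 32 bits and fregs[n + 1] the low 32 bits.  Host-side
 * equivalent of the packing (illustrative only):
 */
static inline uint64_t example_pack_dr(uint32_t fr_high, uint32_t fr_low)
{
    return ((uint64_t)fr_high << 32) | fr_low;
}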
#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
                (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
                ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
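/*
 * Worked example of the field macros: opcode 0x372c encodes "add r2,r7".
 * B15_12 = 0x3 and B3_0 = 0xc select the 0x300c case further down, B11_8 = 7
 * is Rn (the destination) and B7_4 = 2 is Rm.  The same extraction without
 * the ctx plumbing (hypothetical helper, for illustration only):
 */
static inline void example_decode_rn_rm(uint16_t opcode, int *rn, int *rm)
{
    *rn = (opcode >> 8) & 0xf;   /* B11_8 */
    *rm = (opcode >> 4) & 0xf;   /* B7_4  */
}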
#define CHECK_NOT_DELAY_SLOT                                    \
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {     \
      tcg_gen_movi_i32(cpu_pc, ctx->pc-2);                      \
      gen_helper_raise_slot_illegal_instruction();              \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }

#define CHECK_PRIVILEGED                                        \
  if (IS_USER(ctx)) {                                           \
      tcg_gen_movi_i32(cpu_pc, ctx->pc);                        \
      gen_helper_raise_illegal_instruction();                   \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }

#define CHECK_FPU_ENABLED                                       \
  if (ctx->flags & SR_FD) {                                     \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          tcg_gen_movi_i32(cpu_pc, ctx->pc-2);                  \
          gen_helper_raise_slot_fpu_disable();                  \
      } else {                                                  \
          tcg_gen_movi_i32(cpu_pc, ctx->pc);                    \
          gen_helper_raise_fpu_disable();                       \
      }                                                         \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }
494 static void _decode_opc(DisasContext * ctx)
496 #if 0
497 fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
498 #endif
500 switch (ctx->opcode) {
501 case 0x0019: /* div0u */
502 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
503 return;
504 case 0x000b: /* rts */
505 CHECK_NOT_DELAY_SLOT
506 tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
507 ctx->flags |= DELAY_SLOT;
508 ctx->delayed_pc = (uint32_t) - 1;
509 return;
510 case 0x0028: /* clrmac */
511 tcg_gen_movi_i32(cpu_mach, 0);
512 tcg_gen_movi_i32(cpu_macl, 0);
513 return;
514 case 0x0048: /* clrs */
515 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
516 return;
517 case 0x0008: /* clrt */
518 gen_clr_t();
519 return;
520 case 0x0038: /* ldtlb */
521 CHECK_PRIVILEGED
522 gen_helper_ldtlb();
523 return;
524 case 0x002b: /* rte */
525 CHECK_PRIVILEGED
526 CHECK_NOT_DELAY_SLOT
527 tcg_gen_mov_i32(cpu_sr, cpu_ssr);
528 tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
529 ctx->flags |= DELAY_SLOT;
530 ctx->delayed_pc = (uint32_t) - 1;
531 return;
532 case 0x0058: /* sets */
533 tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
534 return;
535 case 0x0018: /* sett */
536 gen_set_t();
537 return;
538 case 0xfbfd: /* frchg */
539 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
540 ctx->bstate = BS_STOP;
541 return;
542 case 0xf3fd: /* fschg */
543 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
544 ctx->bstate = BS_STOP;
545 return;
546 case 0x0009: /* nop */
547 return;
548 case 0x001b: /* sleep */
549 CHECK_PRIVILEGED
550 gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
551 return;
554 switch (ctx->opcode & 0xf000) {
555 case 0x1000: /* mov.l Rm,@(disp,Rn) */
557 TCGv addr = tcg_temp_new();
558 tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
559 tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
560 tcg_temp_free(addr);
562 return;
563 case 0x5000: /* mov.l @(disp,Rm),Rn */
565 TCGv addr = tcg_temp_new();
566 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
567 tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
568 tcg_temp_free(addr);
570 return;
571 case 0xe000: /* mov #imm,Rn */
572 tcg_gen_movi_i32(REG(B11_8), B7_0s);
573 return;
574 case 0x9000: /* mov.w @(disp,PC),Rn */
576 TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
577 tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
578 tcg_temp_free(addr);
580 return;
581 case 0xd000: /* mov.l @(disp,PC),Rn */
583 TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
584 tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
585 tcg_temp_free(addr);
587 return;
588 case 0x7000: /* add #imm,Rn */
589 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
590 return;
591 case 0xa000: /* bra disp */
592 CHECK_NOT_DELAY_SLOT
593 ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
594 tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
595 ctx->flags |= DELAY_SLOT;
596 return;
597 case 0xb000: /* bsr disp */
598 CHECK_NOT_DELAY_SLOT
599 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
600 ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
601 tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
602 ctx->flags |= DELAY_SLOT;
603 return;
606 switch (ctx->opcode & 0xf00f) {
607 case 0x6003: /* mov Rm,Rn */
608 tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
609 return;
610 case 0x2000: /* mov.b Rm,@Rn */
611 tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
612 return;
613 case 0x2001: /* mov.w Rm,@Rn */
614 tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
615 return;
616 case 0x2002: /* mov.l Rm,@Rn */
617 tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
618 return;
619 case 0x6000: /* mov.b @Rm,Rn */
620 tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
621 return;
622 case 0x6001: /* mov.w @Rm,Rn */
623 tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
624 return;
625 case 0x6002: /* mov.l @Rm,Rn */
626 tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
627 return;
628 case 0x2004: /* mov.b Rm,@-Rn */
630 TCGv addr = tcg_temp_new();
631 tcg_gen_subi_i32(addr, REG(B11_8), 1);
632 tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx); /* might cause re-execution */
633 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); /* modify register status */
634 tcg_temp_free(addr);
636 return;
637 case 0x2005: /* mov.w Rm,@-Rn */
639 TCGv addr = tcg_temp_new();
640 tcg_gen_subi_i32(addr, REG(B11_8), 2);
641 tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
642 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 2);
643 tcg_temp_free(addr);
645 return;
646 case 0x2006: /* mov.l Rm,@-Rn */
648 TCGv addr = tcg_temp_new();
649 tcg_gen_subi_i32(addr, REG(B11_8), 4);
650 tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
651 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
653 return;
654 case 0x6004: /* mov.b @Rm+,Rn */
655 tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
656 if ( B11_8 != B7_4 )
657 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
658 return;
659 case 0x6005: /* mov.w @Rm+,Rn */
660 tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
661 if ( B11_8 != B7_4 )
662 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
663 return;
664 case 0x6006: /* mov.l @Rm+,Rn */
665 tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
666 if ( B11_8 != B7_4 )
667 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
668 return;
669 case 0x0004: /* mov.b Rm,@(R0,Rn) */
671 TCGv addr = tcg_temp_new();
672 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
673 tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
674 tcg_temp_free(addr);
676 return;
677 case 0x0005: /* mov.w Rm,@(R0,Rn) */
679 TCGv addr = tcg_temp_new();
680 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
681 tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
682 tcg_temp_free(addr);
684 return;
685 case 0x0006: /* mov.l Rm,@(R0,Rn) */
687 TCGv addr = tcg_temp_new();
688 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
689 tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
690 tcg_temp_free(addr);
692 return;
693 case 0x000c: /* mov.b @(R0,Rm),Rn */
695 TCGv addr = tcg_temp_new();
696 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
697 tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
698 tcg_temp_free(addr);
700 return;
701 case 0x000d: /* mov.w @(R0,Rm),Rn */
703 TCGv addr = tcg_temp_new();
704 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
705 tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
706 tcg_temp_free(addr);
708 return;
709 case 0x000e: /* mov.l @(R0,Rm),Rn */
711 TCGv addr = tcg_temp_new();
712 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
713 tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
714 tcg_temp_free(addr);
716 return;
717 case 0x6008: /* swap.b Rm,Rn */
719 TCGv highw, high, low;
720 highw = tcg_temp_new();
721 tcg_gen_andi_i32(highw, REG(B7_4), 0xffff0000);
722 high = tcg_temp_new();
723 tcg_gen_ext8u_i32(high, REG(B7_4));
724 tcg_gen_shli_i32(high, high, 8);
725 low = tcg_temp_new();
726 tcg_gen_shri_i32(low, REG(B7_4), 8);
727 tcg_gen_ext8u_i32(low, low);
728 tcg_gen_or_i32(REG(B11_8), high, low);
729 tcg_gen_or_i32(REG(B11_8), REG(B11_8), highw);
730 tcg_temp_free(low);
731 tcg_temp_free(high);
733 return;
734 case 0x6009: /* swap.w Rm,Rn */
736 TCGv high, low;
737 high = tcg_temp_new();
738 tcg_gen_ext16u_i32(high, REG(B7_4));
739 tcg_gen_shli_i32(high, high, 16);
740 low = tcg_temp_new();
741 tcg_gen_shri_i32(low, REG(B7_4), 16);
742 tcg_gen_ext16u_i32(low, low);
743 tcg_gen_or_i32(REG(B11_8), high, low);
744 tcg_temp_free(low);
745 tcg_temp_free(high);
747 return;
748 case 0x200d: /* xtrct Rm,Rn */
750 TCGv high, low;
751 high = tcg_temp_new();
752 tcg_gen_ext16u_i32(high, REG(B7_4));
753 tcg_gen_shli_i32(high, high, 16);
754 low = tcg_temp_new();
755 tcg_gen_shri_i32(low, REG(B11_8), 16);
756 tcg_gen_ext16u_i32(low, low);
757 tcg_gen_or_i32(REG(B11_8), high, low);
758 tcg_temp_free(low);
759 tcg_temp_free(high);
761 return;
762 case 0x300c: /* add Rm,Rn */
763 tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
764 return;
765 case 0x300e: /* addc Rm,Rn */
766 gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
767 return;
768 case 0x300f: /* addv Rm,Rn */
769 gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
770 return;
771 case 0x2009: /* and Rm,Rn */
772 tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
773 return;
774 case 0x3000: /* cmp/eq Rm,Rn */
775 gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
776 return;
777 case 0x3003: /* cmp/ge Rm,Rn */
778 gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
779 return;
780 case 0x3007: /* cmp/gt Rm,Rn */
781 gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
782 return;
783 case 0x3006: /* cmp/hi Rm,Rn */
784 gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
785 return;
786 case 0x3002: /* cmp/hs Rm,Rn */
787 gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
788 return;
789 case 0x200c: /* cmp/str Rm,Rn */
791 int label1 = gen_new_label();
792 int label2 = gen_new_label();
793 TCGv cmp1 = tcg_temp_local_new();
794 TCGv cmp2 = tcg_temp_local_new();
795 tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
796 tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
797 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
798 tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
799 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
800 tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
801 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
802 tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
803 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
804 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
805 tcg_gen_br(label2);
806 gen_set_label(label1);
807 tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
808 gen_set_label(label2);
809 tcg_temp_free(cmp2);
810 tcg_temp_free(cmp1);
812 return;
813 case 0x2007: /* div0s Rm,Rn */
815 gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31); /* SR_Q */
816 gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31); /* SR_M */
817 TCGv val = tcg_temp_new();
818 tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
819 gen_copy_bit_i32(cpu_sr, 0, val, 31); /* SR_T */
820 tcg_temp_free(val);
822 return;
823 case 0x3004: /* div1 Rm,Rn */
824 gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
825 return;
826 case 0x300d: /* dmuls.l Rm,Rn */
828 TCGv_i64 tmp1 = tcg_temp_new_i64();
829 TCGv_i64 tmp2 = tcg_temp_new_i64();
831 tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
832 tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
833 tcg_gen_mul_i64(tmp1, tmp1, tmp2);
834 tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
835 tcg_gen_shri_i64(tmp1, tmp1, 32);
836 tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
838 tcg_temp_free_i64(tmp2);
839 tcg_temp_free_i64(tmp1);
841 return;
842 case 0x3005: /* dmulu.l Rm,Rn */
844 TCGv_i64 tmp1 = tcg_temp_new_i64();
845 TCGv_i64 tmp2 = tcg_temp_new_i64();
847 tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
848 tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
849 tcg_gen_mul_i64(tmp1, tmp1, tmp2);
850 tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
851 tcg_gen_shri_i64(tmp1, tmp1, 32);
852 tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
854 tcg_temp_free_i64(tmp2);
855 tcg_temp_free_i64(tmp1);
857 return;
858 case 0x600e: /* exts.b Rm,Rn */
859 tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
860 return;
861 case 0x600f: /* exts.w Rm,Rn */
862 tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
863 return;
864 case 0x600c: /* extu.b Rm,Rn */
865 tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
866 return;
867 case 0x600d: /* extu.w Rm,Rn */
868 tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
869 return;
870 case 0x000f: /* mac.l @Rm+,@Rn+ */
872 TCGv arg0, arg1;
873 arg0 = tcg_temp_new();
874 tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
875 arg1 = tcg_temp_new();
876 tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
877 gen_helper_macl(arg0, arg1);
878 tcg_temp_free(arg1);
879 tcg_temp_free(arg0);
880 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
881 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
883 return;
884 case 0x400f: /* mac.w @Rm+,@Rn+ */
886 TCGv arg0, arg1;
887 arg0 = tcg_temp_new();
888 tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
889 arg1 = tcg_temp_new();
890 tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
891 gen_helper_macw(arg0, arg1);
892 tcg_temp_free(arg1);
893 tcg_temp_free(arg0);
894 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
895 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
897 return;
898 case 0x0007: /* mul.l Rm,Rn */
899 tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
900 return;
901 case 0x200f: /* muls.w Rm,Rn */
903 TCGv arg0, arg1;
904 arg0 = tcg_temp_new();
905 tcg_gen_ext16s_i32(arg0, REG(B7_4));
906 arg1 = tcg_temp_new();
907 tcg_gen_ext16s_i32(arg1, REG(B11_8));
908 tcg_gen_mul_i32(cpu_macl, arg0, arg1);
909 tcg_temp_free(arg1);
910 tcg_temp_free(arg0);
912 return;
913 case 0x200e: /* mulu.w Rm,Rn */
915 TCGv arg0, arg1;
916 arg0 = tcg_temp_new();
917 tcg_gen_ext16u_i32(arg0, REG(B7_4));
918 arg1 = tcg_temp_new();
919 tcg_gen_ext16u_i32(arg1, REG(B11_8));
920 tcg_gen_mul_i32(cpu_macl, arg0, arg1);
921 tcg_temp_free(arg1);
922 tcg_temp_free(arg0);
924 return;
925 case 0x600b: /* neg Rm,Rn */
926 tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
927 return;
928 case 0x600a: /* negc Rm,Rn */
929 gen_helper_negc(REG(B11_8), REG(B7_4));
930 return;
931 case 0x6007: /* not Rm,Rn */
932 tcg_gen_not_i32(REG(B11_8), REG(B7_4));
933 return;
934 case 0x200b: /* or Rm,Rn */
935 tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
936 return;
937 case 0x400c: /* shad Rm,Rn */
939 int label1 = gen_new_label();
940 int label2 = gen_new_label();
941 int label3 = gen_new_label();
942 int label4 = gen_new_label();
943 TCGv shift = tcg_temp_local_new();
944 tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
945 /* Rm positive, shift to the left */
946 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
947 tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
948 tcg_gen_br(label4);
949 /* Rm negative, shift to the right */
950 gen_set_label(label1);
951 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
952 tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
953 tcg_gen_not_i32(shift, REG(B7_4));
954 tcg_gen_andi_i32(shift, shift, 0x1f);
955 tcg_gen_addi_i32(shift, shift, 1);
956 tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
957 tcg_gen_br(label4);
958 /* Rm = -32 */
959 gen_set_label(label2);
960 tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
961 tcg_gen_movi_i32(REG(B11_8), 0);
962 tcg_gen_br(label4);
963 gen_set_label(label3);
964 tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
965 gen_set_label(label4);
966 tcg_temp_free(shift);
968 return;
969 case 0x400d: /* shld Rm,Rn */
971 int label1 = gen_new_label();
972 int label2 = gen_new_label();
973 int label3 = gen_new_label();
974 TCGv shift = tcg_temp_local_new();
975 tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
976 /* Rm positive, shift to the left */
977 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
978 tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
979 tcg_gen_br(label3);
980 /* Rm negative, shift to the right */
981 gen_set_label(label1);
982 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
983 tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
984 tcg_gen_not_i32(shift, REG(B7_4));
985 tcg_gen_andi_i32(shift, shift, 0x1f);
986 tcg_gen_addi_i32(shift, shift, 1);
987 tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
988 tcg_gen_br(label3);
989 /* Rm = -32 */
990 gen_set_label(label2);
991 tcg_gen_movi_i32(REG(B11_8), 0);
992 gen_set_label(label3);
993 tcg_temp_free(shift);
995 return;
996 case 0x3008: /* sub Rm,Rn */
997 tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
998 return;
999 case 0x300a: /* subc Rm,Rn */
1000 gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
1001 return;
1002 case 0x300b: /* subv Rm,Rn */
1003 gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
1004 return;
1005 case 0x2008: /* tst Rm,Rn */
1007 TCGv val = tcg_temp_new();
1008 tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
1009 gen_cmp_imm(TCG_COND_EQ, val, 0);
1010 tcg_temp_free(val);
1012 return;
1013 case 0x200a: /* xor Rm,Rn */
1014 tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1015 return;
1016 case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1017 CHECK_FPU_ENABLED
1018 if (ctx->fpscr & FPSCR_SZ) {
1019 TCGv_i64 fp = tcg_temp_new_i64();
1020 gen_load_fpr64(fp, XREG(B7_4));
1021 gen_store_fpr64(fp, XREG(B11_8));
1022 tcg_temp_free_i64(fp);
1023 } else {
1024 tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1026 return;
1027 case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1028 CHECK_FPU_ENABLED
1029 if (ctx->fpscr & FPSCR_SZ) {
1030 TCGv addr_hi = tcg_temp_new();
1031 int fr = XREG(B7_4);
1032 tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
1033 tcg_gen_qemu_st32(cpu_fregs[fr ], REG(B11_8), ctx->memidx);
1034 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi, ctx->memidx);
1035 tcg_temp_free(addr_hi);
1036 } else {
1037 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
1039 return;
1040 case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1041 CHECK_FPU_ENABLED
1042 if (ctx->fpscr & FPSCR_SZ) {
1043 TCGv addr_hi = tcg_temp_new();
1044 int fr = XREG(B11_8);
1045 tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1046 tcg_gen_qemu_ld32u(cpu_fregs[fr ], REG(B7_4), ctx->memidx);
1047 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi, ctx->memidx);
1048 tcg_temp_free(addr_hi);
1049 } else {
1050 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1052 return;
1053 case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1054 CHECK_FPU_ENABLED
1055 if (ctx->fpscr & FPSCR_SZ) {
1056 TCGv addr_hi = tcg_temp_new();
1057 int fr = XREG(B11_8);
1058 tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1059 tcg_gen_qemu_ld32u(cpu_fregs[fr ], REG(B7_4), ctx->memidx);
1060 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi, ctx->memidx);
1061 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1062 tcg_temp_free(addr_hi);
1063 } else {
1064 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1065 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1067 return;
1068 case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1069 CHECK_FPU_ENABLED
1070 if (ctx->fpscr & FPSCR_SZ) {
1071 TCGv addr = tcg_temp_new_i32();
1072 int fr = XREG(B7_4);
1073 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1074 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1075 tcg_gen_subi_i32(addr, REG(B11_8), 8);
1076 tcg_gen_qemu_st32(cpu_fregs[fr ], addr, ctx->memidx);
1077 tcg_gen_mov_i32(REG(B11_8), addr);
1078 tcg_temp_free(addr);
1079 } else {
1080 TCGv addr;
1081 addr = tcg_temp_new_i32();
1082 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1083 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1084 tcg_temp_free(addr);
1085 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
1087 return;
1088 case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1089 CHECK_FPU_ENABLED
1091 TCGv addr = tcg_temp_new_i32();
1092 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1093 if (ctx->fpscr & FPSCR_SZ) {
1094 int fr = XREG(B11_8);
1095 tcg_gen_qemu_ld32u(cpu_fregs[fr ], addr, ctx->memidx);
1096 tcg_gen_addi_i32(addr, addr, 4);
1097 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1098 } else {
1099 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1101 tcg_temp_free(addr);
1103 return;
1104 case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
1105 CHECK_FPU_ENABLED
1107 TCGv addr = tcg_temp_new();
1108 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1109 if (ctx->fpscr & FPSCR_SZ) {
1110 int fr = XREG(B7_4);
1111 tcg_gen_qemu_ld32u(cpu_fregs[fr ], addr, ctx->memidx);
1112 tcg_gen_addi_i32(addr, addr, 4);
1113 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1114 } else {
1115 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1117 tcg_temp_free(addr);
1119 return;
1120 case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1121 case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1122 case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1123 case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1124 case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1125 case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1127 CHECK_FPU_ENABLED
1128 if (ctx->fpscr & FPSCR_PR) {
1129 TCGv_i64 fp0, fp1;
1131 if (ctx->opcode & 0x0110)
1132 break; /* illegal instruction */
1133 fp0 = tcg_temp_new_i64();
1134 fp1 = tcg_temp_new_i64();
1135 gen_load_fpr64(fp0, DREG(B11_8));
1136 gen_load_fpr64(fp1, DREG(B7_4));
1137 switch (ctx->opcode & 0xf00f) {
1138 case 0xf000: /* fadd Rm,Rn */
1139 gen_helper_fadd_DT(fp0, fp0, fp1);
1140 break;
1141 case 0xf001: /* fsub Rm,Rn */
1142 gen_helper_fsub_DT(fp0, fp0, fp1);
1143 break;
1144 case 0xf002: /* fmul Rm,Rn */
1145 gen_helper_fmul_DT(fp0, fp0, fp1);
1146 break;
1147 case 0xf003: /* fdiv Rm,Rn */
1148 gen_helper_fdiv_DT(fp0, fp0, fp1);
1149 break;
1150 case 0xf004: /* fcmp/eq Rm,Rn */
1151 gen_helper_fcmp_eq_DT(fp0, fp1);
1152 return;
1153 case 0xf005: /* fcmp/gt Rm,Rn */
1154 gen_helper_fcmp_gt_DT(fp0, fp1);
1155 return;
1157 gen_store_fpr64(fp0, DREG(B11_8));
1158 tcg_temp_free_i64(fp0);
1159 tcg_temp_free_i64(fp1);
1160 } else {
1161 switch (ctx->opcode & 0xf00f) {
1162 case 0xf000: /* fadd Rm,Rn */
1163 gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1164 break;
1165 case 0xf001: /* fsub Rm,Rn */
1166 gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1167 break;
1168 case 0xf002: /* fmul Rm,Rn */
1169 gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1170 break;
1171 case 0xf003: /* fdiv Rm,Rn */
1172 gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1173 break;
1174 case 0xf004: /* fcmp/eq Rm,Rn */
1175 gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1176 return;
1177 case 0xf005: /* fcmp/gt Rm,Rn */
1178 gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1179 return;
1183 return;
 1184     case 0xf00e: /* fmac FR0,FRm,FRn */
1186 CHECK_FPU_ENABLED
1187 if (ctx->fpscr & FPSCR_PR) {
1188 break; /* illegal instruction */
1189 } else {
1190 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
1191 cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], cpu_fregs[FREG(B11_8)]);
1192 return;
1197 switch (ctx->opcode & 0xff00) {
1198 case 0xc900: /* and #imm,R0 */
1199 tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1200 return;
1201 case 0xcd00: /* and.b #imm,@(R0,GBR) */
1203 TCGv addr, val;
1204 addr = tcg_temp_new();
1205 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1206 val = tcg_temp_new();
1207 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1208 tcg_gen_andi_i32(val, val, B7_0);
1209 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1210 tcg_temp_free(val);
1211 tcg_temp_free(addr);
1213 return;
1214 case 0x8b00: /* bf label */
1215 CHECK_NOT_DELAY_SLOT
1216 gen_conditional_jump(ctx, ctx->pc + 2,
1217 ctx->pc + 4 + B7_0s * 2);
1218 ctx->bstate = BS_BRANCH;
1219 return;
1220 case 0x8f00: /* bf/s label */
1221 CHECK_NOT_DELAY_SLOT
1222 gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1223 ctx->flags |= DELAY_SLOT_CONDITIONAL;
1224 return;
1225 case 0x8900: /* bt label */
1226 CHECK_NOT_DELAY_SLOT
1227 gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1228 ctx->pc + 2);
1229 ctx->bstate = BS_BRANCH;
1230 return;
1231 case 0x8d00: /* bt/s label */
1232 CHECK_NOT_DELAY_SLOT
1233 gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1234 ctx->flags |= DELAY_SLOT_CONDITIONAL;
1235 return;
1236 case 0x8800: /* cmp/eq #imm,R0 */
1237 gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1238 return;
1239 case 0xc400: /* mov.b @(disp,GBR),R0 */
1241 TCGv addr = tcg_temp_new();
1242 tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1243 tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1244 tcg_temp_free(addr);
1246 return;
1247 case 0xc500: /* mov.w @(disp,GBR),R0 */
1249 TCGv addr = tcg_temp_new();
1250 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1251 tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1252 tcg_temp_free(addr);
1254 return;
1255 case 0xc600: /* mov.l @(disp,GBR),R0 */
1257 TCGv addr = tcg_temp_new();
1258 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1259 tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1260 tcg_temp_free(addr);
1262 return;
1263 case 0xc000: /* mov.b R0,@(disp,GBR) */
1265 TCGv addr = tcg_temp_new();
1266 tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1267 tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1268 tcg_temp_free(addr);
1270 return;
1271 case 0xc100: /* mov.w R0,@(disp,GBR) */
1273 TCGv addr = tcg_temp_new();
1274 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1275 tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1276 tcg_temp_free(addr);
1278 return;
1279 case 0xc200: /* mov.l R0,@(disp,GBR) */
1281 TCGv addr = tcg_temp_new();
1282 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1283 tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1284 tcg_temp_free(addr);
1286 return;
1287 case 0x8000: /* mov.b R0,@(disp,Rn) */
1289 TCGv addr = tcg_temp_new();
1290 tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1291 tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1292 tcg_temp_free(addr);
1294 return;
1295 case 0x8100: /* mov.w R0,@(disp,Rn) */
1297 TCGv addr = tcg_temp_new();
1298 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1299 tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1300 tcg_temp_free(addr);
1302 return;
1303 case 0x8400: /* mov.b @(disp,Rn),R0 */
1305 TCGv addr = tcg_temp_new();
1306 tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1307 tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1308 tcg_temp_free(addr);
1310 return;
1311 case 0x8500: /* mov.w @(disp,Rn),R0 */
1313 TCGv addr = tcg_temp_new();
1314 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1315 tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1316 tcg_temp_free(addr);
1318 return;
1319 case 0xc700: /* mova @(disp,PC),R0 */
1320 tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1321 return;
1322 case 0xcb00: /* or #imm,R0 */
1323 tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1324 return;
1325 case 0xcf00: /* or.b #imm,@(R0,GBR) */
1327 TCGv addr, val;
1328 addr = tcg_temp_new();
1329 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1330 val = tcg_temp_new();
1331 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1332 tcg_gen_ori_i32(val, val, B7_0);
1333 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1334 tcg_temp_free(val);
1335 tcg_temp_free(addr);
1337 return;
1338 case 0xc300: /* trapa #imm */
1340 TCGv imm;
1341 CHECK_NOT_DELAY_SLOT
1342 tcg_gen_movi_i32(cpu_pc, ctx->pc);
1343 imm = tcg_const_i32(B7_0);
1344 gen_helper_trapa(imm);
1345 tcg_temp_free(imm);
1346 ctx->bstate = BS_BRANCH;
1348 return;
1349 case 0xc800: /* tst #imm,R0 */
1351 TCGv val = tcg_temp_new();
1352 tcg_gen_andi_i32(val, REG(0), B7_0);
1353 gen_cmp_imm(TCG_COND_EQ, val, 0);
1354 tcg_temp_free(val);
1356 return;
1357 case 0xcc00: /* tst.b #imm,@(R0,GBR) */
1359 TCGv val = tcg_temp_new();
1360 tcg_gen_add_i32(val, REG(0), cpu_gbr);
1361 tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1362 tcg_gen_andi_i32(val, val, B7_0);
1363 gen_cmp_imm(TCG_COND_EQ, val, 0);
1364 tcg_temp_free(val);
1366 return;
1367 case 0xca00: /* xor #imm,R0 */
1368 tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1369 return;
1370 case 0xce00: /* xor.b #imm,@(R0,GBR) */
1372 TCGv addr, val;
1373 addr = tcg_temp_new();
1374 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1375 val = tcg_temp_new();
1376 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1377 tcg_gen_xori_i32(val, val, B7_0);
1378 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1379 tcg_temp_free(val);
1380 tcg_temp_free(addr);
1382 return;
1385 switch (ctx->opcode & 0xf08f) {
1386 case 0x408e: /* ldc Rm,Rn_BANK */
1387 CHECK_PRIVILEGED
1388 tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1389 return;
1390 case 0x4087: /* ldc.l @Rm+,Rn_BANK */
1391 CHECK_PRIVILEGED
1392 tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1393 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1394 return;
1395 case 0x0082: /* stc Rm_BANK,Rn */
1396 CHECK_PRIVILEGED
1397 tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1398 return;
1399 case 0x4083: /* stc.l Rm_BANK,@-Rn */
1400 CHECK_PRIVILEGED
1402 TCGv addr = tcg_temp_new();
1403 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1404 tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1405 tcg_temp_free(addr);
1406 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
1408 return;
1411 switch (ctx->opcode & 0xf0ff) {
1412 case 0x0023: /* braf Rn */
1413 CHECK_NOT_DELAY_SLOT
1414 tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1415 ctx->flags |= DELAY_SLOT;
1416 ctx->delayed_pc = (uint32_t) - 1;
1417 return;
1418 case 0x0003: /* bsrf Rn */
1419 CHECK_NOT_DELAY_SLOT
1420 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1421 tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1422 ctx->flags |= DELAY_SLOT;
1423 ctx->delayed_pc = (uint32_t) - 1;
1424 return;
1425 case 0x4015: /* cmp/pl Rn */
1426 gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1427 return;
1428 case 0x4011: /* cmp/pz Rn */
1429 gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1430 return;
1431 case 0x4010: /* dt Rn */
1432 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1433 gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1434 return;
1435 case 0x402b: /* jmp @Rn */
1436 CHECK_NOT_DELAY_SLOT
1437 tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1438 ctx->flags |= DELAY_SLOT;
1439 ctx->delayed_pc = (uint32_t) - 1;
1440 return;
1441 case 0x400b: /* jsr @Rn */
1442 CHECK_NOT_DELAY_SLOT
1443 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1444 tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1445 ctx->flags |= DELAY_SLOT;
1446 ctx->delayed_pc = (uint32_t) - 1;
1447 return;
1448 case 0x400e: /* ldc Rm,SR */
1449 CHECK_PRIVILEGED
1450 tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1451 ctx->bstate = BS_STOP;
1452 return;
1453 case 0x4007: /* ldc.l @Rm+,SR */
1454 CHECK_PRIVILEGED
1456 TCGv val = tcg_temp_new();
1457 tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1458 tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1459 tcg_temp_free(val);
1460 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1461 ctx->bstate = BS_STOP;
1463 return;
1464 case 0x0002: /* stc SR,Rn */
1465 CHECK_PRIVILEGED
1466 tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1467 return;
1468 case 0x4003: /* stc SR,@-Rn */
1469 CHECK_PRIVILEGED
1471 TCGv addr = tcg_temp_new();
1472 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1473 tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1474 tcg_temp_free(addr);
1475 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
1477 return;
1478 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1479 case ldnum: \
1480 prechk \
1481 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1482 return; \
1483 case ldpnum: \
1484 prechk \
1485 tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
1486 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1487 return; \
1488 case stnum: \
1489 prechk \
1490 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1491 return; \
1492 case stpnum: \
1493 prechk \
1495 TCGv addr = tcg_temp_new(); \
1496 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1497 tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
1498 tcg_temp_free(addr); \
1499 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4); \
1501 return;
1502 LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {})
1503 LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1504 LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1505 LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
1506 LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1507 LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1508 LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1509 LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {})
1510 LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
1511 case 0x406a: /* lds Rm,FPSCR */
1512 CHECK_FPU_ENABLED
1513 gen_helper_ld_fpscr(REG(B11_8));
1514 ctx->bstate = BS_STOP;
1515 return;
1516 case 0x4066: /* lds.l @Rm+,FPSCR */
1517 CHECK_FPU_ENABLED
1519 TCGv addr = tcg_temp_new();
1520 tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
1521 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1522 gen_helper_ld_fpscr(addr);
1523 tcg_temp_free(addr);
1524 ctx->bstate = BS_STOP;
1526 return;
1527 case 0x006a: /* sts FPSCR,Rn */
1528 CHECK_FPU_ENABLED
1529 tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1530 return;
1531 case 0x4062: /* sts FPSCR,@-Rn */
1532 CHECK_FPU_ENABLED
1534 TCGv addr, val;
1535 val = tcg_temp_new();
1536 tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1537 addr = tcg_temp_new();
1538 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1539 tcg_gen_qemu_st32(val, addr, ctx->memidx);
1540 tcg_temp_free(addr);
1541 tcg_temp_free(val);
1542 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
1544 return;
1545 case 0x00c3: /* movca.l R0,@Rm */
1546 tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1547 return;
1548 case 0x40a9:
1549 /* MOVUA.L @Rm,R0 (Rm) -> R0
1550 Load non-boundary-aligned data */
1551 tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1552 return;
1553 case 0x40e9:
1554 /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
1555 Load non-boundary-aligned data */
1556 tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1557 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1558 return;
1559 case 0x0029: /* movt Rn */
1560 tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1561 return;
1562 case 0x0093: /* ocbi @Rn */
1564 TCGv dummy = tcg_temp_new();
1565 tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1566 tcg_temp_free(dummy);
1568 return;
1569 case 0x00a3: /* ocbp @Rn */
1571 TCGv dummy = tcg_temp_new();
1572 tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1573 tcg_temp_free(dummy);
1575 return;
1576 case 0x00b3: /* ocbwb @Rn */
1578 TCGv dummy = tcg_temp_new();
1579 tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1580 tcg_temp_free(dummy);
1582 return;
1583 case 0x0083: /* pref @Rn */
1584 return;
1585 case 0x00d3: /* prefi @Rn */
1586 if (ctx->features & SH_FEATURE_SH4A)
1587 return;
1588 else
1589 break;
1590 case 0x00e3: /* icbi @Rn */
1591 if (ctx->features & SH_FEATURE_SH4A)
1592 return;
1593 else
1594 break;
1595 case 0x00ab: /* synco */
1596 if (ctx->features & SH_FEATURE_SH4A)
1597 return;
1598 else
1599 break;
1600 case 0x4024: /* rotcl Rn */
1602 TCGv tmp = tcg_temp_new();
1603 tcg_gen_mov_i32(tmp, cpu_sr);
1604 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1605 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1606 gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1607 tcg_temp_free(tmp);
1609 return;
1610 case 0x4025: /* rotcr Rn */
1612 TCGv tmp = tcg_temp_new();
1613 tcg_gen_mov_i32(tmp, cpu_sr);
1614 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1615 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1616 gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1617 tcg_temp_free(tmp);
1619 return;
1620 case 0x4004: /* rotl Rn */
1621 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1622 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1623 gen_copy_bit_i32(REG(B11_8), 0, cpu_sr, 0);
1624 return;
1625 case 0x4005: /* rotr Rn */
1626 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1627 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1628 gen_copy_bit_i32(REG(B11_8), 31, cpu_sr, 0);
1629 return;
1630 case 0x4000: /* shll Rn */
1631 case 0x4020: /* shal Rn */
1632 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1633 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1634 return;
1635 case 0x4021: /* shar Rn */
1636 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1637 tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1638 return;
1639 case 0x4001: /* shlr Rn */
1640 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1641 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1642 return;
1643 case 0x4008: /* shll2 Rn */
1644 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1645 return;
1646 case 0x4018: /* shll8 Rn */
1647 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1648 return;
1649 case 0x4028: /* shll16 Rn */
1650 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1651 return;
1652 case 0x4009: /* shlr2 Rn */
1653 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1654 return;
1655 case 0x4019: /* shlr8 Rn */
1656 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1657 return;
1658 case 0x4029: /* shlr16 Rn */
1659 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1660 return;
1661 case 0x401b: /* tas.b @Rn */
1663 TCGv addr, val;
1664 addr = tcg_temp_local_new();
1665 tcg_gen_mov_i32(addr, REG(B11_8));
1666 val = tcg_temp_local_new();
1667 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1668 gen_cmp_imm(TCG_COND_EQ, val, 0);
1669 tcg_gen_ori_i32(val, val, 0x80);
1670 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1671 tcg_temp_free(val);
1672 tcg_temp_free(addr);
1674 return;
1675 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1676 CHECK_FPU_ENABLED
1677 tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1678 return;
1679 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1680 CHECK_FPU_ENABLED
1681 tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1682 return;
1683 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1684 CHECK_FPU_ENABLED
1685 if (ctx->fpscr & FPSCR_PR) {
1686 TCGv_i64 fp;
1687 if (ctx->opcode & 0x0100)
1688 break; /* illegal instruction */
1689 fp = tcg_temp_new_i64();
1690 gen_helper_float_DT(fp, cpu_fpul);
1691 gen_store_fpr64(fp, DREG(B11_8));
1692 tcg_temp_free_i64(fp);
1694 else {
1695 gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_fpul);
1697 return;
1698 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1699 CHECK_FPU_ENABLED
1700 if (ctx->fpscr & FPSCR_PR) {
1701 TCGv_i64 fp;
1702 if (ctx->opcode & 0x0100)
1703 break; /* illegal instruction */
1704 fp = tcg_temp_new_i64();
1705 gen_load_fpr64(fp, DREG(B11_8));
1706 gen_helper_ftrc_DT(cpu_fpul, fp);
1707 tcg_temp_free_i64(fp);
1709 else {
1710 gen_helper_ftrc_FT(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1712 return;
1713 case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1714 CHECK_FPU_ENABLED
1716 gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1718 return;
1719 case 0xf05d: /* fabs FRn/DRn */
1720 CHECK_FPU_ENABLED
1721 if (ctx->fpscr & FPSCR_PR) {
1722 if (ctx->opcode & 0x0100)
1723 break; /* illegal instruction */
1724 TCGv_i64 fp = tcg_temp_new_i64();
1725 gen_load_fpr64(fp, DREG(B11_8));
1726 gen_helper_fabs_DT(fp, fp);
1727 gen_store_fpr64(fp, DREG(B11_8));
1728 tcg_temp_free_i64(fp);
1729 } else {
1730 gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1732 return;
1733 case 0xf06d: /* fsqrt FRn */
1734 CHECK_FPU_ENABLED
1735 if (ctx->fpscr & FPSCR_PR) {
1736 if (ctx->opcode & 0x0100)
1737 break; /* illegal instruction */
1738 TCGv_i64 fp = tcg_temp_new_i64();
1739 gen_load_fpr64(fp, DREG(B11_8));
1740 gen_helper_fsqrt_DT(fp, fp);
1741 gen_store_fpr64(fp, DREG(B11_8));
1742 tcg_temp_free_i64(fp);
1743 } else {
1744 gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1746 return;
1747 case 0xf07d: /* fsrra FRn */
1748 CHECK_FPU_ENABLED
1749 break;
1750 case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1751 CHECK_FPU_ENABLED
1752 if (!(ctx->fpscr & FPSCR_PR)) {
1753 tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1755 return;
1756 case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1757 CHECK_FPU_ENABLED
1758 if (!(ctx->fpscr & FPSCR_PR)) {
1759 tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1761 return;
1762 case 0xf0ad: /* fcnvsd FPUL,DRn */
1763 CHECK_FPU_ENABLED
1765 TCGv_i64 fp = tcg_temp_new_i64();
1766 gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
1767 gen_store_fpr64(fp, DREG(B11_8));
1768 tcg_temp_free_i64(fp);
1770 return;
1771 case 0xf0bd: /* fcnvds DRn,FPUL */
1772 CHECK_FPU_ENABLED
1774 TCGv_i64 fp = tcg_temp_new_i64();
1775 gen_load_fpr64(fp, DREG(B11_8));
1776 gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
1777 tcg_temp_free_i64(fp);
1779 return;
1781 #if 0
1782 fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1783 ctx->opcode, ctx->pc);
1784 fflush(stderr);
1785 #endif
1786 gen_helper_raise_illegal_instruction();
1787 ctx->bstate = BS_EXCP;
static void decode_opc(DisasContext * ctx)
{
    uint32_t old_flags = ctx->flags;

    _decode_opc(ctx);

    if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        if (ctx->flags & DELAY_SLOT_CLEARME) {
            gen_store_flags(0);
        } else {
            /* go out of the delay slot */
            uint32_t new_flags = ctx->flags;
            new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
            gen_store_flags(new_flags);
        }
        ctx->flags = 0;
        ctx->bstate = BS_BRANCH;
        if (old_flags & DELAY_SLOT_CONDITIONAL) {
            gen_delayed_conditional_jump(ctx);
        } else if (old_flags & DELAY_SLOT) {
            gen_jump(ctx);
        }
    }

    /* go into a delay slot */
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
        gen_store_flags(ctx->flags);
}
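/*
 * Net effect of the delay-slot bookkeeping above for an unconditional
 * "bra disp": the branch instruction only records the target and sets
 * DELAY_SLOT; the following instruction is translated with that flag, and
 * only then does gen_jump() emit the actual control transfer.  Target
 * computation as plain C (reference model only):
 */
static inline uint32_t example_bra_target(uint32_t branch_pc, int32_t disp12)
{
    /* The delay slot at branch_pc + 2 executes before the jump is taken. */
    return branch_pc + 4 + disp12 * 2;
}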
static inline void
gen_intermediate_code_internal(CPUState * env, TranslationBlock * tb,
                               int search_pc)
{
    DisasContext ctx;
    target_ulong pc_start;
    static uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int i, ii;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.flags = (uint32_t)tb->flags;
    ctx.bstate = BS_NONE;
    ctx.sr = env->sr;
    ctx.fpscr = env->fpscr;
    ctx.memidx = (env->sr & SR_MD) ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx.delayed_pc = -1; /* use delayed pc from env pointer */
    ctx.tb = tb;
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.features = env->features;

#ifdef DEBUG_DISAS
    qemu_log_mask(CPU_LOG_TB_CPU,
                  "------------------------------------------------\n");
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
#endif

    ii = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (ctx.pc == bp->pc) {
                    /* We have hit a breakpoint - make sure PC is up-to-date */
                    tcg_gen_movi_i32(cpu_pc, ctx.pc);
                    gen_helper_debug();
                    ctx.bstate = BS_EXCP;
                    break;
                }
            }
        }
        if (search_pc) {
            i = gen_opc_ptr - gen_opc_buf;
            if (ii < i) {
                ii++;
                while (ii < i)
                    gen_opc_instr_start[ii++] = 0;
            }
            gen_opc_pc[ii] = ctx.pc;
            gen_opc_hflags[ii] = ctx.flags;
            gen_opc_instr_start[ii] = 1;
            gen_opc_icount[ii] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if 0
        fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
        fflush(stderr);
#endif
        ctx.opcode = lduw_code(ctx.pc);
        decode_opc(&ctx);
        num_insns++;
        ctx.pc += 2;
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (env->singlestep_enabled)
            break;
        if (num_insns >= max_insns)
            break;
#ifdef SH4_SINGLE_STEP
        break;
#endif
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (env->singlestep_enabled) {
        tcg_gen_movi_i32(cpu_pc, ctx.pc);
        gen_helper_debug();
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            /* gen_op_interrupt_restart(); */
            /* fall through */
        case BS_NONE:
            if (ctx.flags) {
                gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
            }
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            /* gen_op_interrupt_restart(); */
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        i = gen_opc_ptr - gen_opc_buf;
        ii++;
        while (ii <= i)
            gen_opc_instr_start[ii++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
#ifdef SH4_DEBUG_DISAS
    qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
#endif
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN:\n");	/* , lookup_symbol(pc_start)); */
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUState * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
    env->flags = gen_opc_hflags[pc_pos];
}