target-sh4: fix fpu disabled/illegal exception
[qemu.git] / target-sh4 / translate.c
1 /*
2 * SH4 translation
4 * Copyright (c) 2005 Samuel Tardieu
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 #include <stdarg.h>
20 #include <stdlib.h>
21 #include <stdio.h>
22 #include <string.h>
23 #include <inttypes.h>
25 #define DEBUG_DISAS
26 #define SH4_DEBUG_DISAS
27 //#define SH4_SINGLE_STEP
29 #include "cpu.h"
30 #include "exec-all.h"
31 #include "disas.h"
32 #include "tcg-op.h"
33 #include "qemu-common.h"
35 #include "helper.h"
36 #define GEN_HELPER 1
37 #include "helper.h"
39 typedef struct DisasContext {
40 struct TranslationBlock *tb;
41 target_ulong pc;
42 uint32_t sr;
43 uint32_t fpscr;
44 uint16_t opcode;
45 uint32_t flags;
46 int bstate;
47 int memidx;
48 uint32_t delayed_pc;
49 int singlestep_enabled;
50 uint32_t features;
51 int has_movcal;
52 } DisasContext;
54 #if defined(CONFIG_USER_ONLY)
55 #define IS_USER(ctx) 1
56 #else
57 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
58 #endif
60 enum {
61 BS_NONE = 0, /* We go out of the TB without reaching a branch or an
62 * exception condition */
64 BS_STOP = 1, /* We want to stop translation for any reason */
65 BS_BRANCH = 2, /* We reached a branch condition */
66 BS_EXCP = 3, /* We reached an exception condition */
69 /* global register indexes */
70 static TCGv_ptr cpu_env;
71 static TCGv cpu_gregs[24];
72 static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
73 static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
74 static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
75 static TCGv cpu_fregs[32];
77 /* internal register indexes */
78 static TCGv cpu_flags, cpu_delayed_pc;
80 static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
82 #include "gen-icount.h"
84 static void sh4_translate_init(void)
86 int i;
87 static int done_init = 0;
88 static const char * const gregnames[24] = {
89 "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
90 "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
91 "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
92 "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
93 "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
95 static const char * const fregnames[32] = {
96 "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
97 "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
98 "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
99 "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
100 "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
101 "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
102 "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
103 "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
106 if (done_init)
107 return;
109 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
111 for (i = 0; i < 24; i++)
112 cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
113 offsetof(CPUState, gregs[i]),
114 gregnames[i]);
116 cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
117 offsetof(CPUState, pc), "PC");
118 cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
119 offsetof(CPUState, sr), "SR");
120 cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
121 offsetof(CPUState, ssr), "SSR");
122 cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
123 offsetof(CPUState, spc), "SPC");
124 cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
125 offsetof(CPUState, gbr), "GBR");
126 cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
127 offsetof(CPUState, vbr), "VBR");
128 cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
129 offsetof(CPUState, sgr), "SGR");
130 cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
131 offsetof(CPUState, dbr), "DBR");
132 cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
133 offsetof(CPUState, mach), "MACH");
134 cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
135 offsetof(CPUState, macl), "MACL");
136 cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
137 offsetof(CPUState, pr), "PR");
138 cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
139 offsetof(CPUState, fpscr), "FPSCR");
140 cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
141 offsetof(CPUState, fpul), "FPUL");
143 cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
144 offsetof(CPUState, flags), "_flags_");
145 cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
146 offsetof(CPUState, delayed_pc),
147 "_delayed_pc_");
148 cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
149 offsetof(CPUState, ldst), "_ldst_");
151 for (i = 0; i < 32; i++)
152 cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
153 offsetof(CPUState, fregs[i]),
154 fregnames[i]);
156 /* register helpers */
157 #define GEN_HELPER 2
158 #include "helper.h"
160 done_init = 1;
163 void cpu_dump_state(CPUState * env, FILE * f,
164 int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
165 int flags)
167 int i;
168 cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
169 env->pc, env->sr, env->pr, env->fpscr);
170 cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
171 env->spc, env->ssr, env->gbr, env->vbr);
172 cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
173 env->sgr, env->dbr, env->delayed_pc, env->fpul);
174 for (i = 0; i < 24; i += 4) {
175 cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
176 i, env->gregs[i], i + 1, env->gregs[i + 1],
177 i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
179 if (env->flags & DELAY_SLOT) {
180 cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
181 env->delayed_pc);
182 } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
183 cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
184 env->delayed_pc);
188 static void cpu_sh4_reset(CPUSH4State * env)
190 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
191 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
192 log_cpu_state(env, 0);
195 #if defined(CONFIG_USER_ONLY)
196 env->sr = 0;
197 #else
198 env->sr = SR_MD | SR_RB | SR_BL | SR_I3 | SR_I2 | SR_I1 | SR_I0;
199 #endif
200 env->vbr = 0;
201 env->pc = 0xA0000000;
202 #if defined(CONFIG_USER_ONLY)
203 env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
204 set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
205 #else
206 env->fpscr = 0x00040001; /* CPU reset value according to SH4 manual */
207 set_float_rounding_mode(float_round_to_zero, &env->fp_status);
208 #endif
209 env->mmucr = 0;
212 typedef struct {
213 const char *name;
214 int id;
215 uint32_t pvr;
216 uint32_t prr;
217 uint32_t cvr;
218 uint32_t features;
219 } sh4_def_t;
221 static sh4_def_t sh4_defs[] = {
223 .name = "SH7750R",
224 .id = SH_CPU_SH7750R,
225 .pvr = 0x00050000,
226 .prr = 0x00000100,
227 .cvr = 0x00110000,
228 .features = SH_FEATURE_BCR3_AND_BCR4,
229 }, {
230 .name = "SH7751R",
231 .id = SH_CPU_SH7751R,
232 .pvr = 0x04050005,
233 .prr = 0x00000113,
234 .cvr = 0x00110000, /* Neutered caches, should be 0x20480000 */
235 .features = SH_FEATURE_BCR3_AND_BCR4,
236 }, {
237 .name = "SH7785",
238 .id = SH_CPU_SH7785,
239 .pvr = 0x10300700,
240 .prr = 0x00000200,
241 .cvr = 0x71440211,
242 .features = SH_FEATURE_SH4A,
246 static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
248 int i;
250 if (strcasecmp(name, "any") == 0)
251 return &sh4_defs[0];
253 for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
254 if (strcasecmp(name, sh4_defs[i].name) == 0)
255 return &sh4_defs[i];
257 return NULL;
260 void sh4_cpu_list(FILE *f, fprintf_function cpu_fprintf)
262 int i;
264 for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
265 (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
268 static void cpu_sh4_register(CPUSH4State *env, const sh4_def_t *def)
270 env->pvr = def->pvr;
271 env->prr = def->prr;
272 env->cvr = def->cvr;
273 env->id = def->id;
276 CPUSH4State *cpu_sh4_init(const char *cpu_model)
278 CPUSH4State *env;
279 const sh4_def_t *def;
281 def = cpu_sh4_find_by_name(cpu_model);
282 if (!def)
283 return NULL;
284 env = qemu_mallocz(sizeof(CPUSH4State));
285 env->features = def->features;
286 cpu_exec_init(env);
287 env->movcal_backup_tail = &(env->movcal_backup);
288 sh4_translate_init();
289 env->cpu_model_str = cpu_model;
290 cpu_sh4_reset(env);
291 cpu_sh4_register(env, def);
292 tlb_flush(env, 1);
293 qemu_init_vcpu(env);
294 return env;
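/* Emit a jump to "dest".  If the destination lies in the same guest page
   as this TB and we are not single-stepping, chain directly to the next
   TB with tcg_gen_goto_tb()/tcg_gen_exit_tb((long)tb + n); otherwise set
   cpu_pc and return to the main loop. */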
297 static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
299 TranslationBlock *tb;
300 tb = ctx->tb;
302 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
303 !ctx->singlestep_enabled) {
304 /* Use a direct jump if in same page and singlestep not enabled */
305 tcg_gen_goto_tb(n);
306 tcg_gen_movi_i32(cpu_pc, dest);
307 tcg_gen_exit_tb((long) tb + n);
308 } else {
309 tcg_gen_movi_i32(cpu_pc, dest);
310 if (ctx->singlestep_enabled)
311 gen_helper_debug();
312 tcg_gen_exit_tb(0);
316 static void gen_jump(DisasContext * ctx)
318 if (ctx->delayed_pc == (uint32_t) - 1) {
319 /* Target is not statically known: it necessarily comes from a
320 delayed jump, as immediate jumps are conditional jumps */
321 tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
322 if (ctx->singlestep_enabled)
323 gen_helper_debug();
324 tcg_gen_exit_tb(0);
325 } else {
326 gen_goto_tb(ctx, 0, ctx->delayed_pc);
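/* Helper for conditional delayed branches (bt/s, bf/s): record the branch
   target in cpu_delayed_pc and set DELAY_SLOT_TRUE in cpu_flags when SR.T
   matches the expected value t.  gen_delayed_conditional_jump() later
   tests and clears that flag once the delay-slot instruction has been
   emitted. */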
330 static inline void gen_branch_slot(uint32_t delayed_pc, int t)
332 TCGv sr;
333 int label = gen_new_label();
334 tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
335 sr = tcg_temp_new();
336 tcg_gen_andi_i32(sr, cpu_sr, SR_T);
337 tcg_gen_brcondi_i32(TCG_COND_NE, sr, t ? SR_T : 0, label);
338 tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
339 gen_set_label(label);
342 /* Immediate conditional jump (bt or bf) */
343 static void gen_conditional_jump(DisasContext * ctx,
344 target_ulong ift, target_ulong ifnott)
346 int l1;
347 TCGv sr;
349 l1 = gen_new_label();
350 sr = tcg_temp_new();
351 tcg_gen_andi_i32(sr, cpu_sr, SR_T);
352 tcg_gen_brcondi_i32(TCG_COND_EQ, sr, SR_T, l1);
353 gen_goto_tb(ctx, 0, ifnott);
354 gen_set_label(l1);
355 gen_goto_tb(ctx, 1, ift);
358 /* Delayed conditional jump (bt or bf) */
359 static void gen_delayed_conditional_jump(DisasContext * ctx)
361 int l1;
362 TCGv ds;
364 l1 = gen_new_label();
365 ds = tcg_temp_new();
366 tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
367 tcg_gen_brcondi_i32(TCG_COND_EQ, ds, DELAY_SLOT_TRUE, l1);
368 gen_goto_tb(ctx, 1, ctx->pc + 2);
369 gen_set_label(l1);
370 tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
371 gen_jump(ctx);
374 static inline void gen_set_t(void)
376 tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
379 static inline void gen_clr_t(void)
381 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
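/* Compare t1 against t0 (or an immediate) with the given TCG condition
   and set or clear SR.T accordingly; this implements the cmp/xx family. */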
384 static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
386 int label1 = gen_new_label();
387 int label2 = gen_new_label();
388 tcg_gen_brcond_i32(cond, t1, t0, label1);
389 gen_clr_t();
390 tcg_gen_br(label2);
391 gen_set_label(label1);
392 gen_set_t();
393 gen_set_label(label2);
396 static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
398 int label1 = gen_new_label();
399 int label2 = gen_new_label();
400 tcg_gen_brcondi_i32(cond, t0, imm, label1);
401 gen_clr_t();
402 tcg_gen_br(label2);
403 gen_set_label(label1);
404 gen_set_t();
405 gen_set_label(label2);
408 static inline void gen_store_flags(uint32_t flags)
410 tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
411 tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
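/* Copy bit p1 of t1 into bit p0 of t0, leaving the other bits of t0
   untouched.  Used to shuffle single flag bits such as SR.T, SR.Q and
   SR.M for div0s, the rotate and the shift instructions. */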
414 static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
416 TCGv tmp = tcg_temp_new();
418 p0 &= 0x1f;
419 p1 &= 0x1f;
421 tcg_gen_andi_i32(tmp, t1, (1 << p1));
422 tcg_gen_andi_i32(t0, t0, ~(1 << p0));
423 if (p0 < p1)
424 tcg_gen_shri_i32(tmp, tmp, p1 - p0);
425 else if (p0 > p1)
426 tcg_gen_shli_i32(tmp, tmp, p0 - p1);
427 tcg_gen_or_i32(t0, t0, tmp);
429 tcg_temp_free(tmp);
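/* A double-precision DRn value is held in an even/odd pair of 32-bit
   FPRs, with the even register providing the high 32 bits; these helpers
   assemble and split the 64-bit value accordingly. */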
432 static inline void gen_load_fpr64(TCGv_i64 t, int reg)
434 tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
437 static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
439 TCGv_i32 tmp = tcg_temp_new_i32();
440 tcg_gen_trunc_i64_i32(tmp, t);
441 tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
442 tcg_gen_shri_i64(t, t, 32);
443 tcg_gen_trunc_i64_i32(tmp, t);
444 tcg_gen_mov_i32(cpu_fregs[reg], tmp);
445 tcg_temp_free_i32(tmp);
448 #define B3_0 (ctx->opcode & 0xf)
449 #define B6_4 ((ctx->opcode >> 4) & 0x7)
450 #define B7_4 ((ctx->opcode >> 4) & 0xf)
451 #define B7_0 (ctx->opcode & 0xff)
452 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
453 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
454 (ctx->opcode & 0xfff))
455 #define B11_8 ((ctx->opcode >> 8) & 0xf)
456 #define B15_12 ((ctx->opcode >> 12) & 0xf)
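/* Operand field extraction, e.g. for opcode 0x300c (add Rm,Rn):
   B15_12 = 0x3, B11_8 = the Rn index, B7_4 = the Rm index, B3_0 = 0xc.
   REG() selects the BANK1 copy of R0-R7 when both SR.MD and SR.RB are
   set, ALTREG() selects the other (inactive) bank, and FREG()/XREG()
   apply the FPSCR.FR bank switch to the floating-point register file. */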
458 #define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
459 (cpu_gregs[x + 16]) : (cpu_gregs[x]))
461 #define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
462 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
464 #define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
465 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
466 #define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
467 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
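/* Guard macros used at the top of instruction cases: they abort the
   translation of the current instruction and raise the appropriate
   exception instead.  Inside a delay slot the "slot" variants of the
   illegal-instruction and FPU-disabled exceptions are raised, as the
   SH-4 architecture requires. */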
469 #define CHECK_NOT_DELAY_SLOT \
470 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) \
472 tcg_gen_movi_i32(cpu_pc, ctx->pc); \
473 gen_helper_raise_slot_illegal_instruction(); \
474 ctx->bstate = BS_EXCP; \
475 return; \
478 #define CHECK_PRIVILEGED \
479 if (IS_USER(ctx)) { \
480 tcg_gen_movi_i32(cpu_pc, ctx->pc); \
481 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
482 gen_helper_raise_slot_illegal_instruction(); \
483 } else { \
484 gen_helper_raise_illegal_instruction(); \
486 ctx->bstate = BS_EXCP; \
487 return; \
490 #define CHECK_FPU_ENABLED \
491 if (ctx->flags & SR_FD) { \
492 tcg_gen_movi_i32(cpu_pc, ctx->pc); \
493 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
494 gen_helper_raise_slot_fpu_disable(); \
495 } else { \
496 gen_helper_raise_fpu_disable(); \
498 ctx->bstate = BS_EXCP; \
499 return; \
502 static void _decode_opc(DisasContext * ctx)
504 /* This code tries to make movca.l emulation sufficiently
505 accurate for Linux purposes. This instruction writes
506 memory, and prior to that, always allocates a cache line.
507 It is used in two contexts:
508 - in memcpy, where data is copied in blocks, the first write
509 to a block uses movca.l for performance.
510 - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is used
511 to flush the cache. Here, the data written by movca.l is never
512 written to memory, and the data written is just bogus.
514 To simulate this, when emulating movca.l we store the value to memory,
515 but we also remember the previous content. If we see ocbi, we check
516 whether movca.l was done previously for that address. If so, the write
517 should not have hit the memory, so we restore the previous content.
518 When we see an instruction that is neither movca.l
519 nor ocbi, the previous content is discarded.
521 To optimize, we only try to flush stores when we're at the start of
522 the TB, or if we already saw movca.l in this TB and did not flush
523 stores yet. */
524 if (ctx->has_movcal)
526 int opcode = ctx->opcode & 0xf0ff;
527 if (opcode != 0x0093 /* ocbi */
528 && opcode != 0x00c3 /* movca.l */)
530 gen_helper_discard_movcal_backup ();
531 ctx->has_movcal = 0;
535 #if 0
536 fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
537 #endif
539 switch (ctx->opcode) {
540 case 0x0019: /* div0u */
541 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
542 return;
543 case 0x000b: /* rts */
544 CHECK_NOT_DELAY_SLOT
545 tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
546 ctx->flags |= DELAY_SLOT;
547 ctx->delayed_pc = (uint32_t) - 1;
548 return;
549 case 0x0028: /* clrmac */
550 tcg_gen_movi_i32(cpu_mach, 0);
551 tcg_gen_movi_i32(cpu_macl, 0);
552 return;
553 case 0x0048: /* clrs */
554 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
555 return;
556 case 0x0008: /* clrt */
557 gen_clr_t();
558 return;
559 case 0x0038: /* ldtlb */
560 CHECK_PRIVILEGED
561 gen_helper_ldtlb();
562 return;
563 case 0x002b: /* rte */
564 CHECK_PRIVILEGED
565 CHECK_NOT_DELAY_SLOT
566 tcg_gen_mov_i32(cpu_sr, cpu_ssr);
567 tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
568 ctx->flags |= DELAY_SLOT;
569 ctx->delayed_pc = (uint32_t) - 1;
570 return;
571 case 0x0058: /* sets */
572 tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
573 return;
574 case 0x0018: /* sett */
575 gen_set_t();
576 return;
577 case 0xfbfd: /* frchg */
578 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
579 ctx->bstate = BS_STOP;
580 return;
581 case 0xf3fd: /* fschg */
582 tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
583 ctx->bstate = BS_STOP;
584 return;
585 case 0x0009: /* nop */
586 return;
587 case 0x001b: /* sleep */
588 CHECK_PRIVILEGED
589 gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
590 return;
593 switch (ctx->opcode & 0xf000) {
594 case 0x1000: /* mov.l Rm,@(disp,Rn) */
596 TCGv addr = tcg_temp_new();
597 tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
598 tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
599 tcg_temp_free(addr);
601 return;
602 case 0x5000: /* mov.l @(disp,Rm),Rn */
604 TCGv addr = tcg_temp_new();
605 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
606 tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
607 tcg_temp_free(addr);
609 return;
610 case 0xe000: /* mov #imm,Rn */
611 tcg_gen_movi_i32(REG(B11_8), B7_0s);
612 return;
613 case 0x9000: /* mov.w @(disp,PC),Rn */
615 TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
616 tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
617 tcg_temp_free(addr);
619 return;
620 case 0xd000: /* mov.l @(disp,PC),Rn */
622 TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
623 tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
624 tcg_temp_free(addr);
626 return;
627 case 0x7000: /* add #imm,Rn */
628 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
629 return;
630 case 0xa000: /* bra disp */
631 CHECK_NOT_DELAY_SLOT
632 ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
633 tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
634 ctx->flags |= DELAY_SLOT;
635 return;
636 case 0xb000: /* bsr disp */
637 CHECK_NOT_DELAY_SLOT
638 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
639 ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
640 tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
641 ctx->flags |= DELAY_SLOT;
642 return;
645 switch (ctx->opcode & 0xf00f) {
646 case 0x6003: /* mov Rm,Rn */
647 tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
648 return;
649 case 0x2000: /* mov.b Rm,@Rn */
650 tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
651 return;
652 case 0x2001: /* mov.w Rm,@Rn */
653 tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
654 return;
655 case 0x2002: /* mov.l Rm,@Rn */
656 tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
657 return;
658 case 0x6000: /* mov.b @Rm,Rn */
659 tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
660 return;
661 case 0x6001: /* mov.w @Rm,Rn */
662 tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
663 return;
664 case 0x6002: /* mov.l @Rm,Rn */
665 tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
666 return;
667 case 0x2004: /* mov.b Rm,@-Rn */
669 TCGv addr = tcg_temp_new();
670 tcg_gen_subi_i32(addr, REG(B11_8), 1);
671 tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx); /* might cause re-execution */
672 tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */
673 tcg_temp_free(addr);
675 return;
676 case 0x2005: /* mov.w Rm,@-Rn */
678 TCGv addr = tcg_temp_new();
679 tcg_gen_subi_i32(addr, REG(B11_8), 2);
680 tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
681 tcg_gen_mov_i32(REG(B11_8), addr);
682 tcg_temp_free(addr);
684 return;
685 case 0x2006: /* mov.l Rm,@-Rn */
687 TCGv addr = tcg_temp_new();
688 tcg_gen_subi_i32(addr, REG(B11_8), 4);
689 tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
690 tcg_gen_mov_i32(REG(B11_8), addr);
692 return;
693 case 0x6004: /* mov.b @Rm+,Rn */
694 tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
695 if ( B11_8 != B7_4 )
696 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
697 return;
698 case 0x6005: /* mov.w @Rm+,Rn */
699 tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
700 if ( B11_8 != B7_4 )
701 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
702 return;
703 case 0x6006: /* mov.l @Rm+,Rn */
704 tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
705 if ( B11_8 != B7_4 )
706 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
707 return;
708 case 0x0004: /* mov.b Rm,@(R0,Rn) */
710 TCGv addr = tcg_temp_new();
711 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
712 tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
713 tcg_temp_free(addr);
715 return;
716 case 0x0005: /* mov.w Rm,@(R0,Rn) */
718 TCGv addr = tcg_temp_new();
719 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
720 tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
721 tcg_temp_free(addr);
723 return;
724 case 0x0006: /* mov.l Rm,@(R0,Rn) */
726 TCGv addr = tcg_temp_new();
727 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
728 tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
729 tcg_temp_free(addr);
731 return;
732 case 0x000c: /* mov.b @(R0,Rm),Rn */
734 TCGv addr = tcg_temp_new();
735 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
736 tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
737 tcg_temp_free(addr);
739 return;
740 case 0x000d: /* mov.w @(R0,Rm),Rn */
742 TCGv addr = tcg_temp_new();
743 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
744 tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
745 tcg_temp_free(addr);
747 return;
748 case 0x000e: /* mov.l @(R0,Rm),Rn */
750 TCGv addr = tcg_temp_new();
751 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
752 tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
753 tcg_temp_free(addr);
755 return;
756 case 0x6008: /* swap.b Rm,Rn */
758 TCGv high, low;
759 high = tcg_temp_new();
760 tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
761 low = tcg_temp_new();
762 tcg_gen_ext16u_i32(low, REG(B7_4));
763 tcg_gen_bswap16_i32(low, low);
764 tcg_gen_or_i32(REG(B11_8), high, low);
765 tcg_temp_free(low);
766 tcg_temp_free(high);
768 return;
769 case 0x6009: /* swap.w Rm,Rn */
771 TCGv high, low;
772 high = tcg_temp_new();
773 tcg_gen_shli_i32(high, REG(B7_4), 16);
774 low = tcg_temp_new();
775 tcg_gen_shri_i32(low, REG(B7_4), 16);
776 tcg_gen_ext16u_i32(low, low);
777 tcg_gen_or_i32(REG(B11_8), high, low);
778 tcg_temp_free(low);
779 tcg_temp_free(high);
781 return;
782 case 0x200d: /* xtrct Rm,Rn */
784 TCGv high, low;
785 high = tcg_temp_new();
786 tcg_gen_shli_i32(high, REG(B7_4), 16);
787 low = tcg_temp_new();
788 tcg_gen_shri_i32(low, REG(B11_8), 16);
789 tcg_gen_ext16u_i32(low, low);
790 tcg_gen_or_i32(REG(B11_8), high, low);
791 tcg_temp_free(low);
792 tcg_temp_free(high);
794 return;
795 case 0x300c: /* add Rm,Rn */
796 tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
797 return;
798 case 0x300e: /* addc Rm,Rn */
799 gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
800 return;
801 case 0x300f: /* addv Rm,Rn */
802 gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
803 return;
804 case 0x2009: /* and Rm,Rn */
805 tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
806 return;
807 case 0x3000: /* cmp/eq Rm,Rn */
808 gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
809 return;
810 case 0x3003: /* cmp/ge Rm,Rn */
811 gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
812 return;
813 case 0x3007: /* cmp/gt Rm,Rn */
814 gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
815 return;
816 case 0x3006: /* cmp/hi Rm,Rn */
817 gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
818 return;
819 case 0x3002: /* cmp/hs Rm,Rn */
820 gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
821 return;
822 case 0x200c: /* cmp/str Rm,Rn */
824 int label1 = gen_new_label();
825 int label2 = gen_new_label();
826 TCGv cmp1 = tcg_temp_local_new();
827 TCGv cmp2 = tcg_temp_local_new();
828 tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
829 tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
830 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
831 tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
832 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
833 tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
834 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
835 tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
836 tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
837 tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
838 tcg_gen_br(label2);
839 gen_set_label(label1);
840 tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
841 gen_set_label(label2);
842 tcg_temp_free(cmp2);
843 tcg_temp_free(cmp1);
845 return;
846 case 0x2007: /* div0s Rm,Rn */
848 gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31); /* SR_Q */
849 gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31); /* SR_M */
850 TCGv val = tcg_temp_new();
851 tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
852 gen_copy_bit_i32(cpu_sr, 0, val, 31); /* SR_T */
853 tcg_temp_free(val);
855 return;
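/* div1 performs a single 1-bit division step, using the Q, M and T bits
   previously initialised by div0u/div0s; the helper updates both Rn and
   SR (Q and T). */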
856 case 0x3004: /* div1 Rm,Rn */
857 gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
858 return;
859 case 0x300d: /* dmuls.l Rm,Rn */
861 TCGv_i64 tmp1 = tcg_temp_new_i64();
862 TCGv_i64 tmp2 = tcg_temp_new_i64();
864 tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
865 tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
866 tcg_gen_mul_i64(tmp1, tmp1, tmp2);
867 tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
868 tcg_gen_shri_i64(tmp1, tmp1, 32);
869 tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
871 tcg_temp_free_i64(tmp2);
872 tcg_temp_free_i64(tmp1);
874 return;
875 case 0x3005: /* dmulu.l Rm,Rn */
877 TCGv_i64 tmp1 = tcg_temp_new_i64();
878 TCGv_i64 tmp2 = tcg_temp_new_i64();
880 tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
881 tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
882 tcg_gen_mul_i64(tmp1, tmp1, tmp2);
883 tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
884 tcg_gen_shri_i64(tmp1, tmp1, 32);
885 tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
887 tcg_temp_free_i64(tmp2);
888 tcg_temp_free_i64(tmp1);
890 return;
891 case 0x600e: /* exts.b Rm,Rn */
892 tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
893 return;
894 case 0x600f: /* exts.w Rm,Rn */
895 tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
896 return;
897 case 0x600c: /* extu.b Rm,Rn */
898 tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
899 return;
900 case 0x600d: /* extu.w Rm,Rn */
901 tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
902 return;
903 case 0x000f: /* mac.l @Rm+,@Rn+ */
905 TCGv arg0, arg1;
906 arg0 = tcg_temp_new();
907 tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
908 arg1 = tcg_temp_new();
909 tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
910 gen_helper_macl(arg0, arg1);
911 tcg_temp_free(arg1);
912 tcg_temp_free(arg0);
913 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
914 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
916 return;
917 case 0x400f: /* mac.w @Rm+,@Rn+ */
919 TCGv arg0, arg1;
920 arg0 = tcg_temp_new();
921 tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
922 arg1 = tcg_temp_new();
923 tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
924 gen_helper_macw(arg0, arg1);
925 tcg_temp_free(arg1);
926 tcg_temp_free(arg0);
927 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
928 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
930 return;
931 case 0x0007: /* mul.l Rm,Rn */
932 tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
933 return;
934 case 0x200f: /* muls.w Rm,Rn */
936 TCGv arg0, arg1;
937 arg0 = tcg_temp_new();
938 tcg_gen_ext16s_i32(arg0, REG(B7_4));
939 arg1 = tcg_temp_new();
940 tcg_gen_ext16s_i32(arg1, REG(B11_8));
941 tcg_gen_mul_i32(cpu_macl, arg0, arg1);
942 tcg_temp_free(arg1);
943 tcg_temp_free(arg0);
945 return;
946 case 0x200e: /* mulu.w Rm,Rn */
948 TCGv arg0, arg1;
949 arg0 = tcg_temp_new();
950 tcg_gen_ext16u_i32(arg0, REG(B7_4));
951 arg1 = tcg_temp_new();
952 tcg_gen_ext16u_i32(arg1, REG(B11_8));
953 tcg_gen_mul_i32(cpu_macl, arg0, arg1);
954 tcg_temp_free(arg1);
955 tcg_temp_free(arg0);
957 return;
958 case 0x600b: /* neg Rm,Rn */
959 tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
960 return;
961 case 0x600a: /* negc Rm,Rn */
962 gen_helper_negc(REG(B11_8), REG(B7_4));
963 return;
964 case 0x6007: /* not Rm,Rn */
965 tcg_gen_not_i32(REG(B11_8), REG(B7_4));
966 return;
967 case 0x200b: /* or Rm,Rn */
968 tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
969 return;
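/* shad/shld: dynamic shifts by the signed amount in Rm.  A positive Rm
   shifts Rn left by Rm & 31; a negative Rm shifts right (arithmetic for
   shad, logical for shld) by ((~Rm & 31) + 1); a negative Rm whose low
   five bits are zero yields 0 (or all ones for a negative Rn under
   shad). */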
970 case 0x400c: /* shad Rm,Rn */
972 int label1 = gen_new_label();
973 int label2 = gen_new_label();
974 int label3 = gen_new_label();
975 int label4 = gen_new_label();
976 TCGv shift;
977 tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
978 /* Rm positive, shift to the left */
979 shift = tcg_temp_new();
980 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
981 tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
982 tcg_temp_free(shift);
983 tcg_gen_br(label4);
984 /* Rm negative, shift to the right */
985 gen_set_label(label1);
986 shift = tcg_temp_new();
987 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
988 tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
989 tcg_gen_not_i32(shift, REG(B7_4));
990 tcg_gen_andi_i32(shift, shift, 0x1f);
991 tcg_gen_addi_i32(shift, shift, 1);
992 tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
993 tcg_temp_free(shift);
994 tcg_gen_br(label4);
995 /* Rm = -32 */
996 gen_set_label(label2);
997 tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
998 tcg_gen_movi_i32(REG(B11_8), 0);
999 tcg_gen_br(label4);
1000 gen_set_label(label3);
1001 tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
1002 gen_set_label(label4);
1004 return;
1005 case 0x400d: /* shld Rm,Rn */
1007 int label1 = gen_new_label();
1008 int label2 = gen_new_label();
1009 int label3 = gen_new_label();
1010 TCGv shift;
1011 tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
1012 /* Rm positive, shift to the left */
1013 shift = tcg_temp_new();
1014 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1015 tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
1016 tcg_temp_free(shift);
1017 tcg_gen_br(label3);
1018 /* Rm negative, shift to the right */
1019 gen_set_label(label1);
1020 shift = tcg_temp_new();
1021 tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1022 tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
1023 tcg_gen_not_i32(shift, REG(B7_4));
1024 tcg_gen_andi_i32(shift, shift, 0x1f);
1025 tcg_gen_addi_i32(shift, shift, 1);
1026 tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
1027 tcg_temp_free(shift);
1028 tcg_gen_br(label3);
1029 /* Rm = -32 */
1030 gen_set_label(label2);
1031 tcg_gen_movi_i32(REG(B11_8), 0);
1032 gen_set_label(label3);
1034 return;
1035 case 0x3008: /* sub Rm,Rn */
1036 tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1037 return;
1038 case 0x300a: /* subc Rm,Rn */
1039 gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
1040 return;
1041 case 0x300b: /* subv Rm,Rn */
1042 gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
1043 return;
1044 case 0x2008: /* tst Rm,Rn */
1046 TCGv val = tcg_temp_new();
1047 tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
1048 gen_cmp_imm(TCG_COND_EQ, val, 0);
1049 tcg_temp_free(val);
1051 return;
1052 case 0x200a: /* xor Rm,Rn */
1053 tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1054 return;
1055 case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1056 CHECK_FPU_ENABLED
1057 if (ctx->fpscr & FPSCR_SZ) {
1058 TCGv_i64 fp = tcg_temp_new_i64();
1059 gen_load_fpr64(fp, XREG(B7_4));
1060 gen_store_fpr64(fp, XREG(B11_8));
1061 tcg_temp_free_i64(fp);
1062 } else {
1063 tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1065 return;
1066 case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1067 CHECK_FPU_ENABLED
1068 if (ctx->fpscr & FPSCR_SZ) {
1069 TCGv addr_hi = tcg_temp_new();
1070 int fr = XREG(B7_4);
1071 tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
1072 tcg_gen_qemu_st32(cpu_fregs[fr ], REG(B11_8), ctx->memidx);
1073 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi, ctx->memidx);
1074 tcg_temp_free(addr_hi);
1075 } else {
1076 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
1078 return;
1079 case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1080 CHECK_FPU_ENABLED
1081 if (ctx->fpscr & FPSCR_SZ) {
1082 TCGv addr_hi = tcg_temp_new();
1083 int fr = XREG(B11_8);
1084 tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1085 tcg_gen_qemu_ld32u(cpu_fregs[fr ], REG(B7_4), ctx->memidx);
1086 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi, ctx->memidx);
1087 tcg_temp_free(addr_hi);
1088 } else {
1089 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1091 return;
1092 case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1093 CHECK_FPU_ENABLED
1094 if (ctx->fpscr & FPSCR_SZ) {
1095 TCGv addr_hi = tcg_temp_new();
1096 int fr = XREG(B11_8);
1097 tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1098 tcg_gen_qemu_ld32u(cpu_fregs[fr ], REG(B7_4), ctx->memidx);
1099 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi, ctx->memidx);
1100 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1101 tcg_temp_free(addr_hi);
1102 } else {
1103 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1104 tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1106 return;
1107 case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1108 CHECK_FPU_ENABLED
1109 if (ctx->fpscr & FPSCR_SZ) {
1110 TCGv addr = tcg_temp_new_i32();
1111 int fr = XREG(B7_4);
1112 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1113 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1114 tcg_gen_subi_i32(addr, addr, 4);
1115 tcg_gen_qemu_st32(cpu_fregs[fr ], addr, ctx->memidx);
1116 tcg_gen_mov_i32(REG(B11_8), addr);
1117 tcg_temp_free(addr);
1118 } else {
1119 TCGv addr;
1120 addr = tcg_temp_new_i32();
1121 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1122 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1123 tcg_gen_mov_i32(REG(B11_8), addr);
1124 tcg_temp_free(addr);
1126 return;
1127 case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1128 CHECK_FPU_ENABLED
1130 TCGv addr = tcg_temp_new_i32();
1131 tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1132 if (ctx->fpscr & FPSCR_SZ) {
1133 int fr = XREG(B11_8);
1134 tcg_gen_qemu_ld32u(cpu_fregs[fr ], addr, ctx->memidx);
1135 tcg_gen_addi_i32(addr, addr, 4);
1136 tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1137 } else {
1138 tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1140 tcg_temp_free(addr);
1142 return;
1143 case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
1144 CHECK_FPU_ENABLED
1146 TCGv addr = tcg_temp_new();
1147 tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1148 if (ctx->fpscr & FPSCR_SZ) {
1149 int fr = XREG(B7_4);
1150 tcg_gen_qemu_st32(cpu_fregs[fr ], addr, ctx->memidx);
1151 tcg_gen_addi_i32(addr, addr, 4);
1152 tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1153 } else {
1154 tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1156 tcg_temp_free(addr);
1158 return;
1159 case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1160 case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1161 case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1162 case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1163 case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1164 case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1166 CHECK_FPU_ENABLED
1167 if (ctx->fpscr & FPSCR_PR) {
1168 TCGv_i64 fp0, fp1;
1170 if (ctx->opcode & 0x0110)
1171 break; /* illegal instruction */
1172 fp0 = tcg_temp_new_i64();
1173 fp1 = tcg_temp_new_i64();
1174 gen_load_fpr64(fp0, DREG(B11_8));
1175 gen_load_fpr64(fp1, DREG(B7_4));
1176 switch (ctx->opcode & 0xf00f) {
1177 case 0xf000: /* fadd Rm,Rn */
1178 gen_helper_fadd_DT(fp0, fp0, fp1);
1179 break;
1180 case 0xf001: /* fsub Rm,Rn */
1181 gen_helper_fsub_DT(fp0, fp0, fp1);
1182 break;
1183 case 0xf002: /* fmul Rm,Rn */
1184 gen_helper_fmul_DT(fp0, fp0, fp1);
1185 break;
1186 case 0xf003: /* fdiv Rm,Rn */
1187 gen_helper_fdiv_DT(fp0, fp0, fp1);
1188 break;
1189 case 0xf004: /* fcmp/eq Rm,Rn */
1190 gen_helper_fcmp_eq_DT(fp0, fp1);
1191 return;
1192 case 0xf005: /* fcmp/gt Rm,Rn */
1193 gen_helper_fcmp_gt_DT(fp0, fp1);
1194 return;
1196 gen_store_fpr64(fp0, DREG(B11_8));
1197 tcg_temp_free_i64(fp0);
1198 tcg_temp_free_i64(fp1);
1199 } else {
1200 switch (ctx->opcode & 0xf00f) {
1201 case 0xf000: /* fadd Rm,Rn */
1202 gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1203 break;
1204 case 0xf001: /* fsub Rm,Rn */
1205 gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1206 break;
1207 case 0xf002: /* fmul Rm,Rn */
1208 gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1209 break;
1210 case 0xf003: /* fdiv Rm,Rn */
1211 gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1212 break;
1213 case 0xf004: /* fcmp/eq Rm,Rn */
1214 gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1215 return;
1216 case 0xf005: /* fcmp/gt Rm,Rn */
1217 gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1218 return;
1222 return;
1223 case 0xf00e: /* fmac FR0,Rm,Rn */
1225 CHECK_FPU_ENABLED
1226 if (ctx->fpscr & FPSCR_PR) {
1227 break; /* illegal instruction */
1228 } else {
1229 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
1230 cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], cpu_fregs[FREG(B11_8)]);
1231 return;
1236 switch (ctx->opcode & 0xff00) {
1237 case 0xc900: /* and #imm,R0 */
1238 tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1239 return;
1240 case 0xcd00: /* and.b #imm,@(R0,GBR) */
1242 TCGv addr, val;
1243 addr = tcg_temp_new();
1244 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1245 val = tcg_temp_new();
1246 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1247 tcg_gen_andi_i32(val, val, B7_0);
1248 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1249 tcg_temp_free(val);
1250 tcg_temp_free(addr);
1252 return;
1253 case 0x8b00: /* bf label */
1254 CHECK_NOT_DELAY_SLOT
1255 gen_conditional_jump(ctx, ctx->pc + 2,
1256 ctx->pc + 4 + B7_0s * 2);
1257 ctx->bstate = BS_BRANCH;
1258 return;
1259 case 0x8f00: /* bf/s label */
1260 CHECK_NOT_DELAY_SLOT
1261 gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1262 ctx->flags |= DELAY_SLOT_CONDITIONAL;
1263 return;
1264 case 0x8900: /* bt label */
1265 CHECK_NOT_DELAY_SLOT
1266 gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1267 ctx->pc + 2);
1268 ctx->bstate = BS_BRANCH;
1269 return;
1270 case 0x8d00: /* bt/s label */
1271 CHECK_NOT_DELAY_SLOT
1272 gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1273 ctx->flags |= DELAY_SLOT_CONDITIONAL;
1274 return;
1275 case 0x8800: /* cmp/eq #imm,R0 */
1276 gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1277 return;
1278 case 0xc400: /* mov.b @(disp,GBR),R0 */
1280 TCGv addr = tcg_temp_new();
1281 tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1282 tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1283 tcg_temp_free(addr);
1285 return;
1286 case 0xc500: /* mov.w @(disp,GBR),R0 */
1288 TCGv addr = tcg_temp_new();
1289 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1290 tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1291 tcg_temp_free(addr);
1293 return;
1294 case 0xc600: /* mov.l @(disp,GBR),R0 */
1296 TCGv addr = tcg_temp_new();
1297 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1298 tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1299 tcg_temp_free(addr);
1301 return;
1302 case 0xc000: /* mov.b R0,@(disp,GBR) */
1304 TCGv addr = tcg_temp_new();
1305 tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1306 tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1307 tcg_temp_free(addr);
1309 return;
1310 case 0xc100: /* mov.w R0,@(disp,GBR) */
1312 TCGv addr = tcg_temp_new();
1313 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1314 tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1315 tcg_temp_free(addr);
1317 return;
1318 case 0xc200: /* mov.l R0,@(disp,GBR) */
1320 TCGv addr = tcg_temp_new();
1321 tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1322 tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1323 tcg_temp_free(addr);
1325 return;
1326 case 0x8000: /* mov.b R0,@(disp,Rn) */
1328 TCGv addr = tcg_temp_new();
1329 tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1330 tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1331 tcg_temp_free(addr);
1333 return;
1334 case 0x8100: /* mov.w R0,@(disp,Rn) */
1336 TCGv addr = tcg_temp_new();
1337 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1338 tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1339 tcg_temp_free(addr);
1341 return;
1342 case 0x8400: /* mov.b @(disp,Rn),R0 */
1344 TCGv addr = tcg_temp_new();
1345 tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1346 tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1347 tcg_temp_free(addr);
1349 return;
1350 case 0x8500: /* mov.w @(disp,Rn),R0 */
1352 TCGv addr = tcg_temp_new();
1353 tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1354 tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1355 tcg_temp_free(addr);
1357 return;
1358 case 0xc700: /* mova @(disp,PC),R0 */
1359 tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1360 return;
1361 case 0xcb00: /* or #imm,R0 */
1362 tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1363 return;
1364 case 0xcf00: /* or.b #imm,@(R0,GBR) */
1366 TCGv addr, val;
1367 addr = tcg_temp_new();
1368 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1369 val = tcg_temp_new();
1370 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1371 tcg_gen_ori_i32(val, val, B7_0);
1372 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1373 tcg_temp_free(val);
1374 tcg_temp_free(addr);
1376 return;
1377 case 0xc300: /* trapa #imm */
1379 TCGv imm;
1380 CHECK_NOT_DELAY_SLOT
1381 tcg_gen_movi_i32(cpu_pc, ctx->pc);
1382 imm = tcg_const_i32(B7_0);
1383 gen_helper_trapa(imm);
1384 tcg_temp_free(imm);
1385 ctx->bstate = BS_BRANCH;
1387 return;
1388 case 0xc800: /* tst #imm,R0 */
1390 TCGv val = tcg_temp_new();
1391 tcg_gen_andi_i32(val, REG(0), B7_0);
1392 gen_cmp_imm(TCG_COND_EQ, val, 0);
1393 tcg_temp_free(val);
1395 return;
1396 case 0xcc00: /* tst.b #imm,@(R0,GBR) */
1398 TCGv val = tcg_temp_new();
1399 tcg_gen_add_i32(val, REG(0), cpu_gbr);
1400 tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1401 tcg_gen_andi_i32(val, val, B7_0);
1402 gen_cmp_imm(TCG_COND_EQ, val, 0);
1403 tcg_temp_free(val);
1405 return;
1406 case 0xca00: /* xor #imm,R0 */
1407 tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1408 return;
1409 case 0xce00: /* xor.b #imm,@(R0,GBR) */
1411 TCGv addr, val;
1412 addr = tcg_temp_new();
1413 tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1414 val = tcg_temp_new();
1415 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1416 tcg_gen_xori_i32(val, val, B7_0);
1417 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1418 tcg_temp_free(val);
1419 tcg_temp_free(addr);
1421 return;
1424 switch (ctx->opcode & 0xf08f) {
1425 case 0x408e: /* ldc Rm,Rn_BANK */
1426 CHECK_PRIVILEGED
1427 tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1428 return;
1429 case 0x4087: /* ldc.l @Rm+,Rn_BANK */
1430 CHECK_PRIVILEGED
1431 tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1432 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1433 return;
1434 case 0x0082: /* stc Rm_BANK,Rn */
1435 CHECK_PRIVILEGED
1436 tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1437 return;
1438 case 0x4083: /* stc.l Rm_BANK,@-Rn */
1439 CHECK_PRIVILEGED
1441 TCGv addr = tcg_temp_new();
1442 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1443 tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1444 tcg_gen_mov_i32(REG(B11_8), addr);
1445 tcg_temp_free(addr);
1447 return;
1450 switch (ctx->opcode & 0xf0ff) {
1451 case 0x0023: /* braf Rn */
1452 CHECK_NOT_DELAY_SLOT
1453 tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1454 ctx->flags |= DELAY_SLOT;
1455 ctx->delayed_pc = (uint32_t) - 1;
1456 return;
1457 case 0x0003: /* bsrf Rn */
1458 CHECK_NOT_DELAY_SLOT
1459 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1460 tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1461 ctx->flags |= DELAY_SLOT;
1462 ctx->delayed_pc = (uint32_t) - 1;
1463 return;
1464 case 0x4015: /* cmp/pl Rn */
1465 gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1466 return;
1467 case 0x4011: /* cmp/pz Rn */
1468 gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1469 return;
1470 case 0x4010: /* dt Rn */
1471 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1472 gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1473 return;
1474 case 0x402b: /* jmp @Rn */
1475 CHECK_NOT_DELAY_SLOT
1476 tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1477 ctx->flags |= DELAY_SLOT;
1478 ctx->delayed_pc = (uint32_t) - 1;
1479 return;
1480 case 0x400b: /* jsr @Rn */
1481 CHECK_NOT_DELAY_SLOT
1482 tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1483 tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1484 ctx->flags |= DELAY_SLOT;
1485 ctx->delayed_pc = (uint32_t) - 1;
1486 return;
1487 case 0x400e: /* ldc Rm,SR */
1488 CHECK_PRIVILEGED
1489 tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1490 ctx->bstate = BS_STOP;
1491 return;
1492 case 0x4007: /* ldc.l @Rm+,SR */
1493 CHECK_PRIVILEGED
1495 TCGv val = tcg_temp_new();
1496 tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1497 tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1498 tcg_temp_free(val);
1499 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1500 ctx->bstate = BS_STOP;
1502 return;
1503 case 0x0002: /* stc SR,Rn */
1504 CHECK_PRIVILEGED
1505 tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1506 return;
1507 case 0x4003: /* stc SR,@-Rn */
1508 CHECK_PRIVILEGED
1510 TCGv addr = tcg_temp_new();
1511 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1512 tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1513 tcg_gen_mov_i32(REG(B11_8), addr);
1514 tcg_temp_free(addr);
1516 return;
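/* The LD/ST/LDST macros expand to the ldc/lds and stc/sts cases for a
   control or system register: "ldnum" moves Rm into the register,
   "ldpnum" loads it from @Rm+ (post-increment), "stnum" moves it into Rn
   and "stpnum" stores it to @-Rn (pre-decrement), each guarded by the
   given privilege/FPU check. */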
1517 #define LD(reg,ldnum,ldpnum,prechk) \
1518 case ldnum: \
1519 prechk \
1520 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1521 return; \
1522 case ldpnum: \
1523 prechk \
1524 tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
1525 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1526 return;
1527 #define ST(reg,stnum,stpnum,prechk) \
1528 case stnum: \
1529 prechk \
1530 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1531 return; \
1532 case stpnum: \
1533 prechk \
1535 TCGv addr = tcg_temp_new(); \
1536 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1537 tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
1538 tcg_gen_mov_i32(REG(B11_8), addr); \
1539 tcg_temp_free(addr); \
1541 return;
1542 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1543 LD(reg,ldnum,ldpnum,prechk) \
1544 ST(reg,stnum,stpnum,prechk)
1545 LDST(gbr, 0x401e, 0x4017, 0x0012, 0x4013, {})
1546 LDST(vbr, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1547 LDST(ssr, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1548 LDST(spc, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
1549 ST(sgr, 0x003a, 0x4032, CHECK_PRIVILEGED)
1550 LD(sgr, 0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
1551 LDST(dbr, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1552 LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1553 LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1554 LDST(pr, 0x402a, 0x4026, 0x002a, 0x4022, {})
1555 LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
1556 case 0x406a: /* lds Rm,FPSCR */
1557 CHECK_FPU_ENABLED
1558 gen_helper_ld_fpscr(REG(B11_8));
1559 ctx->bstate = BS_STOP;
1560 return;
1561 case 0x4066: /* lds.l @Rm+,FPSCR */
1562 CHECK_FPU_ENABLED
1564 TCGv addr = tcg_temp_new();
1565 tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
1566 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1567 gen_helper_ld_fpscr(addr);
1568 tcg_temp_free(addr);
1569 ctx->bstate = BS_STOP;
1571 return;
1572 case 0x006a: /* sts FPSCR,Rn */
1573 CHECK_FPU_ENABLED
1574 tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1575 return;
1576 case 0x4062: /* sts FPSCR,@-Rn */
1577 CHECK_FPU_ENABLED
1579 TCGv addr, val;
1580 val = tcg_temp_new();
1581 tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1582 addr = tcg_temp_new();
1583 tcg_gen_subi_i32(addr, REG(B11_8), 4);
1584 tcg_gen_qemu_st32(val, addr, ctx->memidx);
1585 tcg_gen_mov_i32(REG(B11_8), addr);
1586 tcg_temp_free(addr);
1587 tcg_temp_free(val);
1589 return;
1590 case 0x00c3: /* movca.l R0,@Rm */
1592 TCGv val = tcg_temp_new();
1593 tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
1594 gen_helper_movcal (REG(B11_8), val);
1595 tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1597 ctx->has_movcal = 1;
1598 return;
1599 case 0x40a9:
1600 /* MOVUA.L @Rm,R0 (Rm) -> R0
1601 Load non-boundary-aligned data */
1602 tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1603 return;
1604 case 0x40e9:
1605 /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
1606 Load non-boundary-aligned data */
1607 tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1608 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1609 return;
1610 case 0x0029: /* movt Rn */
1611 tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1612 return;
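/* movli.l/movco.l are SH4-A's load-linked/store-conditional pair,
   modelled here with the ldst flag: movli.l sets cpu_ldst after the load,
   movco.l copies cpu_ldst into SR.T and performs the store only when the
   flag is still set, then clears it. */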
1613 case 0x0073:
1614 /* MOVCO.L
1615 LDST -> T
1616 If (T == 1) R0 -> (Rn)
1617 0 -> LDST
1619 if (ctx->features & SH_FEATURE_SH4A) {
1620 int label = gen_new_label();
1621 gen_clr_t();
1622 tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
1623 tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1624 tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1625 gen_set_label(label);
1626 tcg_gen_movi_i32(cpu_ldst, 0);
1627 return;
1628 } else
1629 break;
1630 case 0x0063:
1631 /* MOVLI.L @Rm,R0
1632 1 -> LDST
1633 (Rm) -> R0
1634 When an interrupt/exception
1635 occurs, 0 -> LDST
1637 if (ctx->features & SH_FEATURE_SH4A) {
1638 tcg_gen_movi_i32(cpu_ldst, 0);
1639 tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
1640 tcg_gen_movi_i32(cpu_ldst, 1);
1641 return;
1642 } else
1643 break;
1644 case 0x0093: /* ocbi @Rn */
1646 gen_helper_ocbi (REG(B11_8));
1648 return;
1649 case 0x00a3: /* ocbp @Rn */
1651 TCGv dummy = tcg_temp_new();
1652 tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1653 tcg_temp_free(dummy);
1655 return;
1656 case 0x00b3: /* ocbwb @Rn */
1658 TCGv dummy = tcg_temp_new();
1659 tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1660 tcg_temp_free(dummy);
1662 return;
1663 case 0x0083: /* pref @Rn */
1664 return;
1665 case 0x00d3: /* prefi @Rn */
1666 if (ctx->features & SH_FEATURE_SH4A)
1667 return;
1668 else
1669 break;
1670 case 0x00e3: /* icbi @Rn */
1671 if (ctx->features & SH_FEATURE_SH4A)
1672 return;
1673 else
1674 break;
1675 case 0x00ab: /* synco */
1676 if (ctx->features & SH_FEATURE_SH4A)
1677 return;
1678 else
1679 break;
1680 case 0x4024: /* rotcl Rn */
1682 TCGv tmp = tcg_temp_new();
1683 tcg_gen_mov_i32(tmp, cpu_sr);
1684 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1685 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1686 gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1687 tcg_temp_free(tmp);
1689 return;
1690 case 0x4025: /* rotcr Rn */
1692 TCGv tmp = tcg_temp_new();
1693 tcg_gen_mov_i32(tmp, cpu_sr);
1694 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1695 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1696 gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1697 tcg_temp_free(tmp);
1699 return;
1700 case 0x4004: /* rotl Rn */
1701 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1702 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1703 gen_copy_bit_i32(REG(B11_8), 0, cpu_sr, 0);
1704 return;
1705 case 0x4005: /* rotr Rn */
1706 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1707 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1708 gen_copy_bit_i32(REG(B11_8), 31, cpu_sr, 0);
1709 return;
1710 case 0x4000: /* shll Rn */
1711 case 0x4020: /* shal Rn */
1712 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1713 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1714 return;
1715 case 0x4021: /* shar Rn */
1716 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1717 tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1718 return;
1719 case 0x4001: /* shlr Rn */
1720 gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1721 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1722 return;
1723 case 0x4008: /* shll2 Rn */
1724 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1725 return;
1726 case 0x4018: /* shll8 Rn */
1727 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1728 return;
1729 case 0x4028: /* shll16 Rn */
1730 tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1731 return;
1732 case 0x4009: /* shlr2 Rn */
1733 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1734 return;
1735 case 0x4019: /* shlr8 Rn */
1736 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1737 return;
1738 case 0x4029: /* shlr16 Rn */
1739 tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1740 return;
1741 case 0x401b: /* tas.b @Rn */
1743 TCGv addr, val;
1744 addr = tcg_temp_local_new();
1745 tcg_gen_mov_i32(addr, REG(B11_8));
1746 val = tcg_temp_local_new();
1747 tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1748 gen_cmp_imm(TCG_COND_EQ, val, 0);
1749 tcg_gen_ori_i32(val, val, 0x80);
1750 tcg_gen_qemu_st8(val, addr, ctx->memidx);
1751 tcg_temp_free(val);
1752 tcg_temp_free(addr);
1754 return;
1755 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1756 CHECK_FPU_ENABLED
1757 tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1758 return;
1759 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1760 CHECK_FPU_ENABLED
1761 tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1762 return;
1763 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1764 CHECK_FPU_ENABLED
1765 if (ctx->fpscr & FPSCR_PR) {
1766 TCGv_i64 fp;
1767 if (ctx->opcode & 0x0100)
1768 break; /* illegal instruction */
1769 fp = tcg_temp_new_i64();
1770 gen_helper_float_DT(fp, cpu_fpul);
1771 gen_store_fpr64(fp, DREG(B11_8));
1772 tcg_temp_free_i64(fp);
1774 else {
1775 gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_fpul);
1777 return;
1778 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1779 CHECK_FPU_ENABLED
1780 if (ctx->fpscr & FPSCR_PR) {
1781 TCGv_i64 fp;
1782 if (ctx->opcode & 0x0100)
1783 break; /* illegal instruction */
1784 fp = tcg_temp_new_i64();
1785 gen_load_fpr64(fp, DREG(B11_8));
1786 gen_helper_ftrc_DT(cpu_fpul, fp);
1787 tcg_temp_free_i64(fp);
1789 else {
1790 gen_helper_ftrc_FT(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1792 return;
1793 case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1794 CHECK_FPU_ENABLED
1796 gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1798 return;
1799 case 0xf05d: /* fabs FRn/DRn */
1800 CHECK_FPU_ENABLED
1801 if (ctx->fpscr & FPSCR_PR) {
1802 if (ctx->opcode & 0x0100)
1803 break; /* illegal instruction */
1804 TCGv_i64 fp = tcg_temp_new_i64();
1805 gen_load_fpr64(fp, DREG(B11_8));
1806 gen_helper_fabs_DT(fp, fp);
1807 gen_store_fpr64(fp, DREG(B11_8));
1808 tcg_temp_free_i64(fp);
1809 } else {
1810 gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1812 return;
1813 case 0xf06d: /* fsqrt FRn */
1814 CHECK_FPU_ENABLED
1815 if (ctx->fpscr & FPSCR_PR) {
1816 if (ctx->opcode & 0x0100)
1817 break; /* illegal instruction */
1818 TCGv_i64 fp = tcg_temp_new_i64();
1819 gen_load_fpr64(fp, DREG(B11_8));
1820 gen_helper_fsqrt_DT(fp, fp);
1821 gen_store_fpr64(fp, DREG(B11_8));
1822 tcg_temp_free_i64(fp);
1823 } else {
1824 gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1826 return;
1827 case 0xf07d: /* fsrra FRn */
1828 CHECK_FPU_ENABLED
1829 break;
1830 case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1831 CHECK_FPU_ENABLED
1832 if (!(ctx->fpscr & FPSCR_PR)) {
1833 tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1835 return;
1836 case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1837 CHECK_FPU_ENABLED
1838 if (!(ctx->fpscr & FPSCR_PR)) {
1839 tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1841 return;
1842 case 0xf0ad: /* fcnvsd FPUL,DRn */
1843 CHECK_FPU_ENABLED
1845 TCGv_i64 fp = tcg_temp_new_i64();
1846 gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
1847 gen_store_fpr64(fp, DREG(B11_8));
1848 tcg_temp_free_i64(fp);
1850 return;
1851 case 0xf0bd: /* fcnvds DRn,FPUL */
1852 CHECK_FPU_ENABLED
1854 TCGv_i64 fp = tcg_temp_new_i64();
1855 gen_load_fpr64(fp, DREG(B11_8));
1856 gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
1857 tcg_temp_free_i64(fp);
1859 return;
1861 #if 0
1862 fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1863 ctx->opcode, ctx->pc);
1864 fflush(stderr);
1865 #endif
1866 tcg_gen_movi_i32(cpu_pc, ctx->pc);
1867 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1868 gen_helper_raise_slot_illegal_instruction();
1869 } else {
1870 gen_helper_raise_illegal_instruction();
1872 ctx->bstate = BS_EXCP;
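/* Translate one instruction.  Branches are handled in two steps: the
   branch instruction itself only records the target and sets a
   DELAY_SLOT flag, and the actual jump is emitted here after the
   following (delay-slot) instruction has been translated. */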
1875 static void decode_opc(DisasContext * ctx)
1877 uint32_t old_flags = ctx->flags;
1879 _decode_opc(ctx);
1881 if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1882 if (ctx->flags & DELAY_SLOT_CLEARME) {
1883 gen_store_flags(0);
1884 } else {
1885 /* go out of the delay slot */
1886 uint32_t new_flags = ctx->flags;
1887 new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
1888 gen_store_flags(new_flags);
1890 ctx->flags = 0;
1891 ctx->bstate = BS_BRANCH;
1892 if (old_flags & DELAY_SLOT_CONDITIONAL) {
1893 gen_delayed_conditional_jump(ctx);
1894 } else if (old_flags & DELAY_SLOT) {
1895 gen_jump(ctx);
1900 /* go into a delay slot */
1901 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
1902 gen_store_flags(ctx->flags);
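/* Translate a block of guest code.  memidx selects the user or kernel
   MMU index from SR.MD.  When search_pc is set, gen_opc_pc[] and
   gen_opc_hflags[] are filled in for each guest instruction so that
   gen_pc_load() can restore pc and flags after an exception in the
   middle of a TB. */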
1905 static inline void
1906 gen_intermediate_code_internal(CPUState * env, TranslationBlock * tb,
1907 int search_pc)
1909 DisasContext ctx;
1910 target_ulong pc_start;
1911 static uint16_t *gen_opc_end;
1912 CPUBreakpoint *bp;
1913 int i, ii;
1914 int num_insns;
1915 int max_insns;
1917 pc_start = tb->pc;
1918 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1919 ctx.pc = pc_start;
1920 ctx.flags = (uint32_t)tb->flags;
1921 ctx.bstate = BS_NONE;
1922 ctx.sr = env->sr;
1923 ctx.fpscr = env->fpscr;
1924 ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
1925 /* We don't know if the delayed pc came from a dynamic or static branch,
1926 so assume it is a dynamic branch. */
1927 ctx.delayed_pc = -1; /* use delayed pc from env pointer */
1928 ctx.tb = tb;
1929 ctx.singlestep_enabled = env->singlestep_enabled;
1930 ctx.features = env->features;
1931 ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);
1933 ii = -1;
1934 num_insns = 0;
1935 max_insns = tb->cflags & CF_COUNT_MASK;
1936 if (max_insns == 0)
1937 max_insns = CF_COUNT_MASK;
1938 gen_icount_start();
1939 while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
1940 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
1941 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
1942 if (ctx.pc == bp->pc) {
1943 /* We have hit a breakpoint - make sure PC is up-to-date */
1944 tcg_gen_movi_i32(cpu_pc, ctx.pc);
1945 gen_helper_debug();
1946 ctx.bstate = BS_EXCP;
1947 break;
1951 if (search_pc) {
1952 i = gen_opc_ptr - gen_opc_buf;
1953 if (ii < i) {
1954 ii++;
1955 while (ii < i)
1956 gen_opc_instr_start[ii++] = 0;
1958 gen_opc_pc[ii] = ctx.pc;
1959 gen_opc_hflags[ii] = ctx.flags;
1960 gen_opc_instr_start[ii] = 1;
1961 gen_opc_icount[ii] = num_insns;
1963 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1964 gen_io_start();
1965 #if 0
1966 fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
1967 fflush(stderr);
1968 #endif
1969 ctx.opcode = lduw_code(ctx.pc);
1970 decode_opc(&ctx);
1971 num_insns++;
1972 ctx.pc += 2;
1973 if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
1974 break;
1975 if (env->singlestep_enabled)
1976 break;
1977 if (num_insns >= max_insns)
1978 break;
1979 if (singlestep)
1980 break;
1982 if (tb->cflags & CF_LAST_IO)
1983 gen_io_end();
1984 if (env->singlestep_enabled) {
1985 tcg_gen_movi_i32(cpu_pc, ctx.pc);
1986 gen_helper_debug();
1987 } else {
1988 switch (ctx.bstate) {
1989 case BS_STOP:
1990 /* gen_op_interrupt_restart(); */
1991 /* fall through */
1992 case BS_NONE:
1993 if (ctx.flags) {
1994 gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
1996 gen_goto_tb(&ctx, 0, ctx.pc);
1997 break;
1998 case BS_EXCP:
1999 /* gen_op_interrupt_restart(); */
2000 tcg_gen_exit_tb(0);
2001 break;
2002 case BS_BRANCH:
2003 default:
2004 break;
2008 gen_icount_end(tb, num_insns);
2009 *gen_opc_ptr = INDEX_op_end;
2010 if (search_pc) {
2011 i = gen_opc_ptr - gen_opc_buf;
2012 ii++;
2013 while (ii <= i)
2014 gen_opc_instr_start[ii++] = 0;
2015 } else {
2016 tb->size = ctx.pc - pc_start;
2017 tb->icount = num_insns;
2020 #ifdef DEBUG_DISAS
2021 #ifdef SH4_DEBUG_DISAS
2022 qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
2023 #endif
2024 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
2025 qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
2026 log_target_disas(pc_start, ctx.pc - pc_start, 0);
2027 qemu_log("\n");
2029 #endif
2032 void gen_intermediate_code(CPUState * env, struct TranslationBlock *tb)
2034 gen_intermediate_code_internal(env, tb, 0);
2037 void gen_intermediate_code_pc(CPUState * env, struct TranslationBlock *tb)
2039 gen_intermediate_code_internal(env, tb, 1);
2042 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2043 unsigned long searched_pc, int pc_pos, void *puc)
2045 env->pc = gen_opc_pc[pc_pos];
2046 env->flags = gen_opc_hflags[pc_pos];