 * Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
#define SH4_DEBUG_DISAS
//#define SH4_SINGLE_STEP

#include "qemu-common.h"
typedef struct DisasContext {
    struct TranslationBlock *tb;
    int singlestep_enabled;
#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->sr & SR_MD))
#endif
    BS_NONE = 0, /* We go out of the TB without reaching a branch or an
                    exception condition */
    BS_STOP = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP = 3, /* We reached an exception condition */
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_gregs[24];
static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul;
static TCGv cpu_fregs[32];

/* internal register indexes */
static TCGv cpu_flags, cpu_delayed_pc;

#include "gen-icount.h"
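/* Descriptive note: sh4_translate_init below maps the architectural state
   declared above onto named TCG globals; done_init guards it so the
   registration is only performed once. */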
static void sh4_translate_init(void)
{
    int i;
    static int done_init = 0;
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };
    static const char * const fregnames[32] = {
        "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
        "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
        "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
        "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
        "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
        "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
    };

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    for (i = 0; i < 24; i++)
        cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);

    cpu_pc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, pc), "PC");
    cpu_sr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, sr), "SR");
    cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, ssr), "SSR");
    cpu_spc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, spc), "SPC");
    cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, dbr), "DBR");
    cpu_mach = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, mach), "MACH");
    cpu_macl = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, macl), "MACL");
    cpu_pr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, pr), "PR");
    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, delayed_pc),
                                            "_delayed_pc_");

    for (i = 0; i < 32; i++)
        cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                              offsetof(CPUState, fregs[i]),
                                              fregnames[i]);

    /* register helpers */
void cpu_dump_state(CPUState * env, FILE * f,
                    int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
                    int flags)
{
    int i;

    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                env->pc, env->sr, env->pr, env->fpscr);
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                env->spc, env->ssr, env->gbr, env->vbr);
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
    for (i = 0; i < 24; i += 4) {
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    if (env->flags & DELAY_SLOT) {
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    }
}
static void cpu_sh4_reset(CPUSH4State * env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }
#if defined(CONFIG_USER_ONLY)
#else
    env->sr = SR_MD | SR_RB | SR_BL | SR_I3 | SR_I2 | SR_I1 | SR_I0;
#endif
    env->pc = 0xA0000000;
#if defined(CONFIG_USER_ONLY)
    env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
    set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
#else
    env->fpscr = 0x00040001; /* CPU reset value according to SH4 manual */
    set_float_rounding_mode(float_round_to_zero, &env->fp_status);
#endif
}
static sh4_def_t sh4_defs[] = {
    {
        .id = SH_CPU_SH7750R,
        .features = SH_FEATURE_BCR3_AND_BCR4,
    }, {
        .id = SH_CPU_SH7751R,
        .cvr = 0x00110000, /* Neutered caches, should be 0x20480000 */
        .features = SH_FEATURE_BCR3_AND_BCR4,
    }, {
        .features = SH_FEATURE_SH4A,
    },
};
static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
{
    int i;

    if (strcasecmp(name, "any") == 0)
        return &sh4_defs[0];
    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
        if (strcasecmp(name, sh4_defs[i].name) == 0)
            return &sh4_defs[i];
    return NULL;
}
void sh4_cpu_list(FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
{
    int i;

    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
        (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
}
static void cpu_sh4_register(CPUSH4State *env, const sh4_def_t *def)

CPUSH4State *cpu_sh4_init(const char *cpu_model)
{
    CPUSH4State *env;
    const sh4_def_t *def;

    def = cpu_sh4_find_by_name(cpu_model);
    if (!def)
        return NULL;
    env = qemu_mallocz(sizeof(CPUSH4State));
    env->features = def->features;
    sh4_translate_init();
    env->cpu_model_str = cpu_model;
    cpu_sh4_register(env, def);
    return env;
}
static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        !ctx->singlestep_enabled) {
        /* Use a direct jump if in same page and singlestep not enabled */
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb((long) tb + n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->singlestep_enabled)
static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == (uint32_t) - 1) {
        /* Target is not statically known, it comes necessarily from a
           delayed jump as immediate jumps are conditional jumps */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        if (ctx->singlestep_enabled)
    } else {
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}
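/* Descriptive note: gen_branch_slot records the target of a conditional
   delayed branch. delayed_pc is written unconditionally, and DELAY_SLOT_TRUE
   is set in cpu_flags only when the T bit matches the branch sense (t). */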
static inline void gen_branch_slot(uint32_t delayed_pc, int t)
{
    TCGv sr;
    int label = gen_new_label();
    tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_NE, sr, t ? SR_T : 0, label);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    gen_set_label(label);
}
/* Immediate conditional jump (bt or bf) */
static void gen_conditional_jump(DisasContext * ctx,
                                 target_ulong ift, target_ulong ifnott)
{
    int l1;
    TCGv sr;

    l1 = gen_new_label();
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_EQ, sr, SR_T, l1);
    gen_goto_tb(ctx, 0, ifnott);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ift);
}
/* Delayed conditional jump (bt or bf) */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    int l1;
    TCGv ds;

    l1 = gen_new_label();
    ds = tcg_temp_new();
    tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_brcondi_i32(TCG_COND_EQ, ds, DELAY_SLOT_TRUE, l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    gen_set_label(l1);
    tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
static inline void gen_set_t(void)
{
    tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
}

static inline void gen_clr_t(void)
{
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
}
static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
{
    int label1 = gen_new_label();
    int label2 = gen_new_label();
    tcg_gen_brcond_i32(cond, t1, t0, label1);
    gen_clr_t();
    tcg_gen_br(label2);
    gen_set_label(label1);
    gen_set_t();
    gen_set_label(label2);
}

static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
{
    int label1 = gen_new_label();
    int label2 = gen_new_label();
    tcg_gen_brcondi_i32(cond, t0, imm, label1);
    gen_clr_t();
    tcg_gen_br(label2);
    gen_set_label(label1);
    gen_set_t();
    gen_set_label(label2);
}
static inline void gen_store_flags(uint32_t flags)
{
    tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
}
static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
{
    TCGv tmp = tcg_temp_new();

    /* Copy bit p1 of t1 into bit p0 of t0 */
    tcg_gen_andi_i32(tmp, t1, (1 << p1));
    tcg_gen_andi_i32(t0, t0, ~(1 << p0));
    if (p1 > p0)
        tcg_gen_shri_i32(tmp, tmp, p1 - p0);
    else if (p1 < p0)
        tcg_gen_shli_i32(tmp, tmp, p0 - p1);
    tcg_gen_or_i32(t0, t0, tmp);
    tcg_temp_free(tmp);
}
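/* Descriptive note: an SH4 double-precision DRn register is the pair of two
   adjacent single-precision registers in the current bank; the helpers below
   pack and unpack that pair into a 64-bit TCG value. */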
static inline void gen_load_fpr64(TCGv_i64 t, int reg)
{
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
}

static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(tmp, t);
    tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
    tcg_gen_shri_i64(t, t, 32);
    tcg_gen_trunc_i64_i32(tmp, t);
    tcg_gen_mov_i32(cpu_fregs[reg], tmp);
    tcg_temp_free_i32(tmp);
}
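/* Descriptive note: the B<high>_<low> macros below extract bit fields of the
   opcode being translated; the s-suffixed variants are sign-extended. */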
#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
  (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
  ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
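/* Descriptive note: REG() selects the active bank for R0-R7 (BANK1 when both
   SR.MD and SR.RB are set), ALTREG() selects the inactive bank; FREG/XREG/DREG
   apply the FPSCR.FR bank bit to the floating point register file. */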
#define CHECK_NOT_DELAY_SLOT \
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) \
      tcg_gen_movi_i32(cpu_pc, ctx->pc-2); \
      gen_helper_raise_slot_illegal_instruction(); \
      ctx->bstate = BS_EXCP; \

#define CHECK_PRIVILEGED \
  if (IS_USER(ctx)) { \
      tcg_gen_movi_i32(cpu_pc, ctx->pc); \
      gen_helper_raise_illegal_instruction(); \
      ctx->bstate = BS_EXCP; \

#define CHECK_FPU_ENABLED \
  if (ctx->flags & SR_FD) { \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          tcg_gen_movi_i32(cpu_pc, ctx->pc-2); \
          gen_helper_raise_slot_fpu_disable(); \
          tcg_gen_movi_i32(cpu_pc, ctx->pc); \
          gen_helper_raise_fpu_disable(); \
      ctx->bstate = BS_EXCP; \
static void _decode_opc(DisasContext * ctx)
{
    fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);

    switch (ctx->opcode) {
    case 0x0019: /* div0u */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
    case 0x000b: /* rts */
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
    case 0x0028: /* clrmac */
        tcg_gen_movi_i32(cpu_mach, 0);
        tcg_gen_movi_i32(cpu_macl, 0);
    case 0x0048: /* clrs */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
    case 0x0008: /* clrt */
    case 0x0038: /* ldtlb */
    case 0x002b: /* rte */
        tcg_gen_mov_i32(cpu_sr, cpu_ssr);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
    case 0x0058: /* sets */
        tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
    case 0x0018: /* sett */
    case 0xfbfd: /* frchg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
        ctx->bstate = BS_STOP;
    case 0xf3fd: /* fschg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
        ctx->bstate = BS_STOP;
    case 0x0009: /* nop */
    case 0x001b: /* sleep */
        gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
    switch (ctx->opcode & 0xf000) {
    case 0x1000: /* mov.l Rm,@(disp,Rn) */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
    case 0x5000: /* mov.l @(disp,Rm),Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
    case 0xe000: /* mov #imm,Rn */
        tcg_gen_movi_i32(REG(B11_8), B7_0s);
    case 0x9000: /* mov.w @(disp,PC),Rn */
            TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
            tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
    case 0xd000: /* mov.l @(disp,PC),Rn */
            TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
    case 0x7000: /* add #imm,Rn */
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
    case 0xa000: /* bra disp */
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
        ctx->flags |= DELAY_SLOT;
    case 0xb000: /* bsr disp */
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
        ctx->flags |= DELAY_SLOT;
    switch (ctx->opcode & 0xf00f) {
    case 0x6003: /* mov Rm,Rn */
        tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
    case 0x2000: /* mov.b Rm,@Rn */
        tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
    case 0x2001: /* mov.w Rm,@Rn */
        tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
    case 0x2002: /* mov.l Rm,@Rn */
        tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
    case 0x6000: /* mov.b @Rm,Rn */
        tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
    case 0x6001: /* mov.w @Rm,Rn */
        tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
    case 0x6002: /* mov.l @Rm,Rn */
        tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
    case 0x2004: /* mov.b Rm,@-Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 1);
            tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx); /* might cause re-execution */
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1); /* modify register status */
    case 0x2005: /* mov.w Rm,@-Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 2);
            tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 2);
    case 0x2006: /* mov.l Rm,@-Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
    case 0x6004: /* mov.b @Rm+,Rn */
        tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
    case 0x6005: /* mov.w @Rm+,Rn */
        tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
    case 0x6006: /* mov.l @Rm+,Rn */
        tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
    case 0x0004: /* mov.b Rm,@(R0,Rn) */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
    case 0x0005: /* mov.w Rm,@(R0,Rn) */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
    case 0x0006: /* mov.l Rm,@(R0,Rn) */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
    case 0x000c: /* mov.b @(R0,Rm),Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
    case 0x000d: /* mov.w @(R0,Rm),Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
    case 0x000e: /* mov.l @(R0,Rm),Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
    case 0x6008: /* swap.b Rm,Rn */
            TCGv highw, high, low;
            highw = tcg_temp_new();
            tcg_gen_andi_i32(highw, REG(B7_4), 0xffff0000);
            high = tcg_temp_new();
            tcg_gen_ext8u_i32(high, REG(B7_4));
            tcg_gen_shli_i32(high, high, 8);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B7_4), 8);
            tcg_gen_ext8u_i32(low, low);
            tcg_gen_or_i32(REG(B11_8), high, low);
            tcg_gen_or_i32(REG(B11_8), REG(B11_8), highw);
    case 0x6009: /* swap.w Rm,Rn */
            high = tcg_temp_new();
            tcg_gen_ext16u_i32(high, REG(B7_4));
            tcg_gen_shli_i32(high, high, 16);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B7_4), 16);
            tcg_gen_ext16u_i32(low, low);
            tcg_gen_or_i32(REG(B11_8), high, low);
    case 0x200d: /* xtrct Rm,Rn */
            high = tcg_temp_new();
            tcg_gen_ext16u_i32(high, REG(B7_4));
            tcg_gen_shli_i32(high, high, 16);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B11_8), 16);
            tcg_gen_ext16u_i32(low, low);
            tcg_gen_or_i32(REG(B11_8), high, low);
    case 0x300c: /* add Rm,Rn */
        tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
    case 0x300e: /* addc Rm,Rn */
        gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
    case 0x300f: /* addv Rm,Rn */
        gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
    case 0x2009: /* and Rm,Rn */
        tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
    case 0x3000: /* cmp/eq Rm,Rn */
        gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
    case 0x3003: /* cmp/ge Rm,Rn */
        gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
    case 0x3007: /* cmp/gt Rm,Rn */
        gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
    case 0x3006: /* cmp/hi Rm,Rn */
        gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
    case 0x3002: /* cmp/hs Rm,Rn */
        gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
    case 0x200c: /* cmp/str Rm,Rn */
            int label1 = gen_new_label();
            int label2 = gen_new_label();
            TCGv cmp1 = tcg_temp_local_new();
            TCGv cmp2 = tcg_temp_local_new();
            tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
            tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
            tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
            tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
            tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
            tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
            tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
            tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
            tcg_gen_brcondi_i32(TCG_COND_EQ, cmp2, 0, label1);
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
            gen_set_label(label1);
            tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
            gen_set_label(label2);
    case 0x2007: /* div0s Rm,Rn */
            gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31); /* SR_Q */
            gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31); /* SR_M */
            TCGv val = tcg_temp_new();
            tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
            gen_copy_bit_i32(cpu_sr, 0, val, 31); /* SR_T */
    case 0x3004: /* div1 Rm,Rn */
        gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
    case 0x300d: /* dmuls.l Rm,Rn */
            TCGv_i64 tmp1 = tcg_temp_new_i64();
            TCGv_i64 tmp2 = tcg_temp_new_i64();
            tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
            tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
            tcg_gen_shri_i64(tmp1, tmp1, 32);
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp1);
    case 0x3005: /* dmulu.l Rm,Rn */
            TCGv_i64 tmp1 = tcg_temp_new_i64();
            TCGv_i64 tmp2 = tcg_temp_new_i64();
            tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
            tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
            tcg_gen_shri_i64(tmp1, tmp1, 32);
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp1);
    case 0x600e: /* exts.b Rm,Rn */
        tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
    case 0x600f: /* exts.w Rm,Rn */
        tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
    case 0x600c: /* extu.b Rm,Rn */
        tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
    case 0x600d: /* extu.w Rm,Rn */
        tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
    case 0x000f: /* mac.l @Rm+,@Rn+ */
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
            gen_helper_macl(arg0, arg1);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
    case 0x400f: /* mac.w @Rm+,@Rn+ */
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
            gen_helper_macw(arg0, arg1);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
    case 0x0007: /* mul.l Rm,Rn */
        tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
    case 0x200f: /* muls.w Rm,Rn */
            arg0 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
    case 0x200e: /* mulu.w Rm,Rn */
            arg0 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
    case 0x600b: /* neg Rm,Rn */
        tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
    case 0x600a: /* negc Rm,Rn */
        gen_helper_negc(REG(B11_8), REG(B7_4));
    case 0x6007: /* not Rm,Rn */
        tcg_gen_not_i32(REG(B11_8), REG(B7_4));
    case 0x200b: /* or Rm,Rn */
        tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
    case 0x400c: /* shad Rm,Rn */
            int label1 = gen_new_label();
            int label2 = gen_new_label();
            int label3 = gen_new_label();
            int label4 = gen_new_label();
            TCGv shift = tcg_temp_local_new();
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
            /* Rm positive, shift to the left */
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
            /* Rm negative, shift to the right */
            gen_set_label(label1);
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
            tcg_gen_not_i32(shift, REG(B7_4));
            tcg_gen_andi_i32(shift, shift, 0x1f);
            tcg_gen_addi_i32(shift, shift, 1);
            tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
            gen_set_label(label2);
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
            tcg_gen_movi_i32(REG(B11_8), 0);
            gen_set_label(label3);
            tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
            gen_set_label(label4);
            tcg_temp_free(shift);
    case 0x400d: /* shld Rm,Rn */
            int label1 = gen_new_label();
            int label2 = gen_new_label();
            int label3 = gen_new_label();
            TCGv shift = tcg_temp_local_new();
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
            /* Rm positive, shift to the left */
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
            /* Rm negative, shift to the right */
            gen_set_label(label1);
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
            tcg_gen_not_i32(shift, REG(B7_4));
            tcg_gen_andi_i32(shift, shift, 0x1f);
            tcg_gen_addi_i32(shift, shift, 1);
            tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
            gen_set_label(label2);
            tcg_gen_movi_i32(REG(B11_8), 0);
            gen_set_label(label3);
            tcg_temp_free(shift);
    case 0x3008: /* sub Rm,Rn */
        tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
    case 0x300a: /* subc Rm,Rn */
        gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
    case 0x300b: /* subv Rm,Rn */
        gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
    case 0x2008: /* tst Rm,Rn */
            TCGv val = tcg_temp_new();
            tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
            gen_cmp_imm(TCG_COND_EQ, val, 0);
    case 0x200a: /* xor Rm,Rn */
        tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
    case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, XREG(B7_4));
            gen_store_fpr64(fp, XREG(B11_8));
            tcg_temp_free_i64(fp);
            tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
    case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B7_4);
            tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_fregs[fr], REG(B11_8), ctx->memidx);
            tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi, ctx->memidx);
            tcg_temp_free(addr_hi);
            tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
    case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B11_8);
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
            tcg_gen_qemu_ld32u(cpu_fregs[fr], REG(B7_4), ctx->memidx);
            tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi, ctx->memidx);
            tcg_temp_free(addr_hi);
            tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
    case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B11_8);
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
            tcg_gen_qemu_ld32u(cpu_fregs[fr], REG(B7_4), ctx->memidx);
            tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi, ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
            tcg_temp_free(addr_hi);
            tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
    case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr = tcg_temp_new_i32();
            int fr = XREG(B7_4);
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
            tcg_gen_subi_i32(addr, REG(B11_8), 8);
            tcg_gen_qemu_st32(cpu_fregs[fr], addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
            addr = tcg_temp_new_i32();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
            tcg_temp_free(addr);
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
    case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
            TCGv addr = tcg_temp_new_i32();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            if (ctx->fpscr & FPSCR_SZ) {
                int fr = XREG(B11_8);
                tcg_gen_qemu_ld32u(cpu_fregs[fr], addr, ctx->memidx);
                tcg_gen_addi_i32(addr, addr, 4);
                tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
                tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            if (ctx->fpscr & FPSCR_SZ) {
                int fr = XREG(B7_4);
                tcg_gen_qemu_ld32u(cpu_fregs[fr], addr, ctx->memidx);
                tcg_gen_addi_i32(addr, addr, 4);
                tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
                tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
    case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0110)
                break; /* illegal instruction */
            fp0 = tcg_temp_new_i64();
            fp1 = tcg_temp_new_i64();
            gen_load_fpr64(fp0, DREG(B11_8));
            gen_load_fpr64(fp1, DREG(B7_4));
            switch (ctx->opcode & 0xf00f) {
            case 0xf000: /* fadd Rm,Rn */
                gen_helper_fadd_DT(fp0, fp0, fp1);
            case 0xf001: /* fsub Rm,Rn */
                gen_helper_fsub_DT(fp0, fp0, fp1);
            case 0xf002: /* fmul Rm,Rn */
                gen_helper_fmul_DT(fp0, fp0, fp1);
            case 0xf003: /* fdiv Rm,Rn */
                gen_helper_fdiv_DT(fp0, fp0, fp1);
            case 0xf004: /* fcmp/eq Rm,Rn */
                gen_helper_fcmp_eq_DT(fp0, fp1);
            case 0xf005: /* fcmp/gt Rm,Rn */
                gen_helper_fcmp_gt_DT(fp0, fp1);
            gen_store_fpr64(fp0, DREG(B11_8));
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
            switch (ctx->opcode & 0xf00f) {
            case 0xf000: /* fadd Rm,Rn */
                gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
            case 0xf001: /* fsub Rm,Rn */
                gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
            case 0xf002: /* fmul Rm,Rn */
                gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
            case 0xf003: /* fdiv Rm,Rn */
                gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
            case 0xf004: /* fcmp/eq Rm,Rn */
                gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
            case 0xf005: /* fcmp/gt Rm,Rn */
                gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
    case 0xf00e: /* fmac FR0,RM,Rn */
        if (ctx->fpscr & FPSCR_PR) {
            break; /* illegal instruction */
        gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
                           cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], cpu_fregs[FREG(B11_8)]);
    switch (ctx->opcode & 0xff00) {
    case 0xc900: /* and #imm,R0 */
        tcg_gen_andi_i32(REG(0), REG(0), B7_0);
    case 0xcd00: /* and.b #imm,@(R0,GBR) */
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_andi_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0x8b00: /* bf label */
        CHECK_NOT_DELAY_SLOT
        gen_conditional_jump(ctx, ctx->pc + 2,
                             ctx->pc + 4 + B7_0s * 2);
        ctx->bstate = BS_BRANCH;
    case 0x8f00: /* bf/s label */
        CHECK_NOT_DELAY_SLOT
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
    case 0x8900: /* bt label */
        CHECK_NOT_DELAY_SLOT
        gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
                             ctx->pc + 2);
        ctx->bstate = BS_BRANCH;
    case 0x8d00: /* bt/s label */
        CHECK_NOT_DELAY_SLOT
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
    case 0x8800: /* cmp/eq #imm,R0 */
        gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
    case 0xc400: /* mov.b @(disp,GBR),R0 */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
            tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc500: /* mov.w @(disp,GBR),R0 */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
            tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc600: /* mov.l @(disp,GBR),R0 */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
            tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc000: /* mov.b R0,@(disp,GBR) */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
            tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc100: /* mov.w R0,@(disp,GBR) */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
            tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc200: /* mov.l R0,@(disp,GBR) */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
            tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0x8000: /* mov.b R0,@(disp,Rn) */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
            tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0x8100: /* mov.w R0,@(disp,Rn) */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
            tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0x8400: /* mov.b @(disp,Rn),R0 */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
            tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0x8500: /* mov.w @(disp,Rn),R0 */
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
            tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc700: /* mova @(disp,PC),R0 */
        tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
    case 0xcb00: /* or #imm,R0 */
        tcg_gen_ori_i32(REG(0), REG(0), B7_0);
    case 0xcf00: /* or.b #imm,@(R0,GBR) */
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_ori_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
    case 0xc300: /* trapa #imm */
            CHECK_NOT_DELAY_SLOT
            tcg_gen_movi_i32(cpu_pc, ctx->pc);
            imm = tcg_const_i32(B7_0);
            gen_helper_trapa(imm);
            ctx->bstate = BS_BRANCH;
    case 0xc800: /* tst #imm,R0 */
            TCGv val = tcg_temp_new();
            tcg_gen_andi_i32(val, REG(0), B7_0);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
    case 0xcc00: /* tst.b #imm,@(R0,GBR) */
            TCGv val = tcg_temp_new();
            tcg_gen_add_i32(val, REG(0), cpu_gbr);
            tcg_gen_qemu_ld8u(val, val, ctx->memidx);
            tcg_gen_andi_i32(val, val, B7_0);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
    case 0xca00: /* xor #imm,R0 */
        tcg_gen_xori_i32(REG(0), REG(0), B7_0);
    case 0xce00: /* xor.b #imm,@(R0,GBR) */
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_xori_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
    switch (ctx->opcode & 0xf08f) {
    case 0x408e: /* ldc Rm,Rn_BANK */
        tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
    case 0x4087: /* ldc.l @Rm+,Rn_BANK */
        tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
    case 0x0082: /* stc Rm_BANK,Rn */
        tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
    case 0x4083: /* stc.l Rm_BANK,@-Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
            tcg_temp_free(addr);
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
    switch (ctx->opcode & 0xf0ff) {
    case 0x0023: /* braf Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
    case 0x0003: /* bsrf Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
    case 0x4015: /* cmp/pl Rn */
        gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
    case 0x4011: /* cmp/pz Rn */
        gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
    case 0x4010: /* dt Rn */
        tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
        gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
    case 0x402b: /* jmp @Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
    case 0x400b: /* jsr @Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
    case 0x400e: /* ldc Rm,SR */
        tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
        ctx->bstate = BS_STOP;
    case 0x4007: /* ldc.l @Rm+,SR */
            TCGv val = tcg_temp_new();
            tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
            tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
            ctx->bstate = BS_STOP;
    case 0x0002: /* stc SR,Rn */
        tcg_gen_mov_i32(REG(B11_8), cpu_sr);
    case 0x4003: /* stc SR,@-Rn */
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
            tcg_temp_free(addr);
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
#define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)		\
	tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));		\
	tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
	tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);		\
	tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);		\
	TCGv addr = tcg_temp_new();				\
	tcg_gen_subi_i32(addr, REG(B11_8), 4);			\
	tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);	\
	tcg_temp_free(addr);					\
	tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);		\

	LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
	LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
	LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
	LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
	LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
	LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
	LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
	LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
	LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
    case 0x406a: /* lds Rm,FPSCR */
        gen_helper_ld_fpscr(REG(B11_8));
        ctx->bstate = BS_STOP;
    case 0x4066: /* lds.l @Rm+,FPSCR */
            TCGv addr = tcg_temp_new();
            tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
            gen_helper_ld_fpscr(addr);
            tcg_temp_free(addr);
            ctx->bstate = BS_STOP;
    case 0x006a: /* sts FPSCR,Rn */
        tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
    case 0x4062: /* sts FPSCR,@-Rn */
            val = tcg_temp_new();
            tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
            addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(val, addr, ctx->memidx);
            tcg_temp_free(addr);
            tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4);
    case 0x00c3: /* movca.l R0,@Rm */
        tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
        /* MOVUA.L @Rm,R0 (Rm) -> R0
           Load non-boundary-aligned data */
        tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
        /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
           Load non-boundary-aligned data */
        tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
    case 0x0029: /* movt Rn */
        tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
    case 0x0093: /* ocbi @Rn */
            TCGv dummy = tcg_temp_new();
            tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
            tcg_temp_free(dummy);
    case 0x00a3: /* ocbp @Rn */
            TCGv dummy = tcg_temp_new();
            tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
            tcg_temp_free(dummy);
    case 0x00b3: /* ocbwb @Rn */
            TCGv dummy = tcg_temp_new();
            tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
            tcg_temp_free(dummy);
    case 0x0083: /* pref @Rn */
    case 0x00d3: /* prefi @Rn */
        if (ctx->features & SH_FEATURE_SH4A)
    case 0x00e3: /* icbi @Rn */
        if (ctx->features & SH_FEATURE_SH4A)
    case 0x00ab: /* synco */
        if (ctx->features & SH_FEATURE_SH4A)
    case 0x4024: /* rotcl Rn */
            TCGv tmp = tcg_temp_new();
            tcg_gen_mov_i32(tmp, cpu_sr);
            gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
            tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
            gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
    case 0x4025: /* rotcr Rn */
            TCGv tmp = tcg_temp_new();
            tcg_gen_mov_i32(tmp, cpu_sr);
            gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
            tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
            gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
    case 0x4004: /* rotl Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
        gen_copy_bit_i32(REG(B11_8), 0, cpu_sr, 0);
    case 0x4005: /* rotr Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
        gen_copy_bit_i32(REG(B11_8), 31, cpu_sr, 0);
    case 0x4000: /* shll Rn */
    case 0x4020: /* shal Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
    case 0x4021: /* shar Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
    case 0x4001: /* shlr Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1644 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 2);
1646 case 0x4018: /* shll8 Rn */
1647 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 8);
1649 case 0x4028: /* shll16 Rn */
1650 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 16);
1652 case 0x4009: /* shlr2 Rn */
1653 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 2);
1655 case 0x4019: /* shlr8 Rn */
1656 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 8);
1658 case 0x4029: /* shlr16 Rn */
1659 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 16);
1661 case 0x401b: /* tas.b @Rn */
1664 addr
= tcg_temp_local_new();
1665 tcg_gen_mov_i32(addr
, REG(B11_8
));
1666 val
= tcg_temp_local_new();
1667 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1668 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1669 tcg_gen_ori_i32(val
, val
, 0x80);
1670 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1672 tcg_temp_free(addr
);
1675 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1677 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1679 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1681 tcg_gen_mov_i32(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1683 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1685 if (ctx
->fpscr
& FPSCR_PR
) {
1687 if (ctx
->opcode
& 0x0100)
1688 break; /* illegal instruction */
1689 fp
= tcg_temp_new_i64();
1690 gen_helper_float_DT(fp
, cpu_fpul
);
1691 gen_store_fpr64(fp
, DREG(B11_8
));
1692 tcg_temp_free_i64(fp
);
1695 gen_helper_float_FT(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1698 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1700 if (ctx
->fpscr
& FPSCR_PR
) {
1702 if (ctx
->opcode
& 0x0100)
1703 break; /* illegal instruction */
1704 fp
= tcg_temp_new_i64();
1705 gen_load_fpr64(fp
, DREG(B11_8
));
1706 gen_helper_ftrc_DT(cpu_fpul
, fp
);
1707 tcg_temp_free_i64(fp
);
1710 gen_helper_ftrc_FT(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
    case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
            gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
    case 0xf05d: /* fabs FRn/DRn */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fabs_DT(fp, fp);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
            gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
    case 0xf06d: /* fsqrt FRn */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fsqrt_DT(fp, fp);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
            gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
    case 0xf07d: /* fsrra FRn */
    case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
    case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
    case 0xf0ad: /* fcnvsd FPUL,DRn */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
    case 0xf0bd: /* fcnvds DRn,FPUL */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
            tcg_temp_free_i64(fp);

    fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
            ctx->opcode, ctx->pc);
    gen_helper_raise_illegal_instruction();
    ctx->bstate = BS_EXCP;
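/* Descriptive note: decode_opc wraps _decode_opc. If the previous instruction
   opened a delay slot, the saved flags are written back and the (possibly
   conditional) delayed branch is emitted after the slot instruction has been
   translated; otherwise the flags for a newly opened delay slot are stored. */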
static void decode_opc(DisasContext * ctx)
{
    uint32_t old_flags = ctx->flags;

    if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        if (ctx->flags & DELAY_SLOT_CLEARME) {
            /* go out of the delay slot */
            uint32_t new_flags = ctx->flags;
            new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
            gen_store_flags(new_flags);
        ctx->bstate = BS_BRANCH;
        if (old_flags & DELAY_SLOT_CONDITIONAL) {
            gen_delayed_conditional_jump(ctx);
        } else if (old_flags & DELAY_SLOT) {

    /* go into a delay slot */
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
        gen_store_flags(ctx->flags);
gen_intermediate_code_internal(CPUState * env, TranslationBlock * tb,
    target_ulong pc_start;
    static uint16_t *gen_opc_end;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.flags = (uint32_t)tb->flags;
    ctx.bstate = BS_NONE;
    ctx.fpscr = env->fpscr;
    ctx.memidx = (env->sr & SR_MD) ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch. */
    ctx.delayed_pc = -1; /* use delayed pc from env pointer */
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.features = env->features;

    qemu_log_mask(CPU_LOG_TB_CPU,
                  "------------------------------------------------\n");
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);

    max_insns = tb->cflags & CF_COUNT_MASK;
        max_insns = CF_COUNT_MASK;
    while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (ctx.pc == bp->pc) {
                    /* We have hit a breakpoint - make sure PC is up-to-date */
                    tcg_gen_movi_i32(cpu_pc, ctx.pc);
                    ctx.bstate = BS_EXCP;
            i = gen_opc_ptr - gen_opc_buf;
                gen_opc_instr_start[ii++] = 0;
            gen_opc_pc[ii] = ctx.pc;
            gen_opc_hflags[ii] = ctx.flags;
            gen_opc_instr_start[ii] = 1;
            gen_opc_icount[ii] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
        fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
        ctx.opcode = lduw_code(ctx.pc);
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
        if (env->singlestep_enabled)
        if (num_insns >= max_insns)
#ifdef SH4_SINGLE_STEP
    if (tb->cflags & CF_LAST_IO)
    if (env->singlestep_enabled) {
        tcg_gen_movi_i32(cpu_pc, ctx.pc);
    switch (ctx.bstate) {
        /* gen_op_interrupt_restart(); */
        gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
        gen_goto_tb(&ctx, 0, ctx.pc);
        /* gen_op_interrupt_restart(); */
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
        i = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[ii++] = 0;
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;

#ifdef SH4_DEBUG_DISAS
    qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
void gen_intermediate_code(CPUState * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
    env->flags = gen_opc_hflags[pc_pos];
}