/*
 * Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
26 #define SH4_DEBUG_DISAS
27 //#define SH4_SINGLE_STEP
33 #include "qemu-common.h"
39 typedef struct DisasContext
{
40 struct TranslationBlock
*tb
;
49 int singlestep_enabled
;
/* In user-mode emulation everything runs unprivileged; in system mode
   privilege is determined by the MD bit of the translation-time SR copy. */
#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->sr & SR_MD))
#endif
/* Translation-block exit states recorded in DisasContext.bstate. */
enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                      exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
69 /* global register indexes */
70 static TCGv_ptr cpu_env
;
71 static TCGv cpu_gregs
[24];
72 static TCGv cpu_pc
, cpu_sr
, cpu_ssr
, cpu_spc
, cpu_gbr
;
73 static TCGv cpu_vbr
, cpu_sgr
, cpu_dbr
, cpu_mach
, cpu_macl
;
74 static TCGv cpu_pr
, cpu_fpscr
, cpu_fpul
, cpu_ldst
;
75 static TCGv cpu_fregs
[32];
77 /* internal register indexes */
78 static TCGv cpu_flags
, cpu_delayed_pc
;
80 static uint32_t gen_opc_hflags
[OPC_BUF_SIZE
];
82 #include "gen-icount.h"
84 static void sh4_translate_init(void)
87 static int done_init
= 0;
88 static const char * const gregnames
[24] = {
89 "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
90 "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
91 "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
92 "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
93 "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
95 static const char * const fregnames
[32] = {
96 "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
97 "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
98 "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
99 "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
100 "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
101 "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
102 "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
103 "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
109 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
111 for (i
= 0; i
< 24; i
++)
112 cpu_gregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
113 offsetof(CPUState
, gregs
[i
]),
116 cpu_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
117 offsetof(CPUState
, pc
), "PC");
118 cpu_sr
= tcg_global_mem_new_i32(TCG_AREG0
,
119 offsetof(CPUState
, sr
), "SR");
120 cpu_ssr
= tcg_global_mem_new_i32(TCG_AREG0
,
121 offsetof(CPUState
, ssr
), "SSR");
122 cpu_spc
= tcg_global_mem_new_i32(TCG_AREG0
,
123 offsetof(CPUState
, spc
), "SPC");
124 cpu_gbr
= tcg_global_mem_new_i32(TCG_AREG0
,
125 offsetof(CPUState
, gbr
), "GBR");
126 cpu_vbr
= tcg_global_mem_new_i32(TCG_AREG0
,
127 offsetof(CPUState
, vbr
), "VBR");
128 cpu_sgr
= tcg_global_mem_new_i32(TCG_AREG0
,
129 offsetof(CPUState
, sgr
), "SGR");
130 cpu_dbr
= tcg_global_mem_new_i32(TCG_AREG0
,
131 offsetof(CPUState
, dbr
), "DBR");
132 cpu_mach
= tcg_global_mem_new_i32(TCG_AREG0
,
133 offsetof(CPUState
, mach
), "MACH");
134 cpu_macl
= tcg_global_mem_new_i32(TCG_AREG0
,
135 offsetof(CPUState
, macl
), "MACL");
136 cpu_pr
= tcg_global_mem_new_i32(TCG_AREG0
,
137 offsetof(CPUState
, pr
), "PR");
138 cpu_fpscr
= tcg_global_mem_new_i32(TCG_AREG0
,
139 offsetof(CPUState
, fpscr
), "FPSCR");
140 cpu_fpul
= tcg_global_mem_new_i32(TCG_AREG0
,
141 offsetof(CPUState
, fpul
), "FPUL");
143 cpu_flags
= tcg_global_mem_new_i32(TCG_AREG0
,
144 offsetof(CPUState
, flags
), "_flags_");
145 cpu_delayed_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
146 offsetof(CPUState
, delayed_pc
),
148 cpu_ldst
= tcg_global_mem_new_i32(TCG_AREG0
,
149 offsetof(CPUState
, ldst
), "_ldst_");
151 for (i
= 0; i
< 32; i
++)
152 cpu_fregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
153 offsetof(CPUState
, fregs
[i
]),
156 /* register helpers */
163 void cpu_dump_state(CPUState
* env
, FILE * f
,
164 int (*cpu_fprintf
) (FILE * f
, const char *fmt
, ...),
168 cpu_fprintf(f
, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
169 env
->pc
, env
->sr
, env
->pr
, env
->fpscr
);
170 cpu_fprintf(f
, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
171 env
->spc
, env
->ssr
, env
->gbr
, env
->vbr
);
172 cpu_fprintf(f
, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
173 env
->sgr
, env
->dbr
, env
->delayed_pc
, env
->fpul
);
174 for (i
= 0; i
< 24; i
+= 4) {
175 cpu_fprintf(f
, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
176 i
, env
->gregs
[i
], i
+ 1, env
->gregs
[i
+ 1],
177 i
+ 2, env
->gregs
[i
+ 2], i
+ 3, env
->gregs
[i
+ 3]);
179 if (env
->flags
& DELAY_SLOT
) {
180 cpu_fprintf(f
, "in delay slot (delayed_pc=0x%08x)\n",
182 } else if (env
->flags
& DELAY_SLOT_CONDITIONAL
) {
183 cpu_fprintf(f
, "in conditional delay slot (delayed_pc=0x%08x)\n",
188 static void cpu_sh4_reset(CPUSH4State
* env
)
190 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
191 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
192 log_cpu_state(env
, 0);
195 #if defined(CONFIG_USER_ONLY)
198 env
->sr
= SR_MD
| SR_RB
| SR_BL
| SR_I3
| SR_I2
| SR_I1
| SR_I0
;
201 env
->pc
= 0xA0000000;
202 #if defined(CONFIG_USER_ONLY)
203 env
->fpscr
= FPSCR_PR
; /* value for userspace according to the kernel */
204 set_float_rounding_mode(float_round_nearest_even
, &env
->fp_status
); /* ?! */
206 env
->fpscr
= FPSCR_DN
| FPSCR_RM_ZERO
; /* CPU reset value according to SH4 manual */
207 set_float_rounding_mode(float_round_to_zero
, &env
->fp_status
);
208 set_flush_to_zero(1, &env
->fp_status
);
210 set_default_nan_mode(1, &env
->fp_status
);
223 static sh4_def_t sh4_defs
[] = {
226 .id
= SH_CPU_SH7750R
,
230 .features
= SH_FEATURE_BCR3_AND_BCR4
,
233 .id
= SH_CPU_SH7751R
,
236 .cvr
= 0x00110000, /* Neutered caches, should be 0x20480000 */
237 .features
= SH_FEATURE_BCR3_AND_BCR4
,
244 .features
= SH_FEATURE_SH4A
,
248 static const sh4_def_t
*cpu_sh4_find_by_name(const char *name
)
252 if (strcasecmp(name
, "any") == 0)
255 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
256 if (strcasecmp(name
, sh4_defs
[i
].name
) == 0)
262 void sh4_cpu_list(FILE *f
, fprintf_function cpu_fprintf
)
266 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
267 (*cpu_fprintf
)(f
, "%s\n", sh4_defs
[i
].name
);
270 static void cpu_sh4_register(CPUSH4State
*env
, const sh4_def_t
*def
)
278 CPUSH4State
*cpu_sh4_init(const char *cpu_model
)
281 const sh4_def_t
*def
;
283 def
= cpu_sh4_find_by_name(cpu_model
);
286 env
= qemu_mallocz(sizeof(CPUSH4State
));
287 env
->features
= def
->features
;
289 env
->movcal_backup_tail
= &(env
->movcal_backup
);
290 sh4_translate_init();
291 env
->cpu_model_str
= cpu_model
;
293 cpu_sh4_register(env
, def
);
299 static void gen_goto_tb(DisasContext
* ctx
, int n
, target_ulong dest
)
301 TranslationBlock
*tb
;
304 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
305 !ctx
->singlestep_enabled
) {
306 /* Use a direct jump if in same page and singlestep not enabled */
308 tcg_gen_movi_i32(cpu_pc
, dest
);
309 tcg_gen_exit_tb((long) tb
+ n
);
311 tcg_gen_movi_i32(cpu_pc
, dest
);
312 if (ctx
->singlestep_enabled
)
318 static void gen_jump(DisasContext
* ctx
)
320 if (ctx
->delayed_pc
== (uint32_t) - 1) {
321 /* Target is not statically known, it comes necessarily from a
322 delayed jump as immediate jump are conditinal jumps */
323 tcg_gen_mov_i32(cpu_pc
, cpu_delayed_pc
);
324 if (ctx
->singlestep_enabled
)
328 gen_goto_tb(ctx
, 0, ctx
->delayed_pc
);
332 static inline void gen_branch_slot(uint32_t delayed_pc
, int t
)
335 int label
= gen_new_label();
336 tcg_gen_movi_i32(cpu_delayed_pc
, delayed_pc
);
338 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
339 tcg_gen_brcondi_i32(TCG_COND_NE
, sr
, t
? SR_T
: 0, label
);
340 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
341 gen_set_label(label
);
344 /* Immediate conditional jump (bt or bf) */
345 static void gen_conditional_jump(DisasContext
* ctx
,
346 target_ulong ift
, target_ulong ifnott
)
351 l1
= gen_new_label();
353 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
354 tcg_gen_brcondi_i32(TCG_COND_EQ
, sr
, SR_T
, l1
);
355 gen_goto_tb(ctx
, 0, ifnott
);
357 gen_goto_tb(ctx
, 1, ift
);
360 /* Delayed conditional jump (bt or bf) */
361 static void gen_delayed_conditional_jump(DisasContext
* ctx
)
366 l1
= gen_new_label();
368 tcg_gen_andi_i32(ds
, cpu_flags
, DELAY_SLOT_TRUE
);
369 tcg_gen_brcondi_i32(TCG_COND_EQ
, ds
, DELAY_SLOT_TRUE
, l1
);
370 gen_goto_tb(ctx
, 1, ctx
->pc
+ 2);
372 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, ~DELAY_SLOT_TRUE
);
376 static inline void gen_set_t(void)
378 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
381 static inline void gen_clr_t(void)
383 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
386 static inline void gen_cmp(int cond
, TCGv t0
, TCGv t1
)
388 int label1
= gen_new_label();
389 int label2
= gen_new_label();
390 tcg_gen_brcond_i32(cond
, t1
, t0
, label1
);
393 gen_set_label(label1
);
395 gen_set_label(label2
);
398 static inline void gen_cmp_imm(int cond
, TCGv t0
, int32_t imm
)
400 int label1
= gen_new_label();
401 int label2
= gen_new_label();
402 tcg_gen_brcondi_i32(cond
, t0
, imm
, label1
);
405 gen_set_label(label1
);
407 gen_set_label(label2
);
410 static inline void gen_store_flags(uint32_t flags
)
412 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
413 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, flags
);
416 static inline void gen_copy_bit_i32(TCGv t0
, int p0
, TCGv t1
, int p1
)
418 TCGv tmp
= tcg_temp_new();
423 tcg_gen_andi_i32(tmp
, t1
, (1 << p1
));
424 tcg_gen_andi_i32(t0
, t0
, ~(1 << p0
));
426 tcg_gen_shri_i32(tmp
, tmp
, p1
- p0
);
428 tcg_gen_shli_i32(tmp
, tmp
, p0
- p1
);
429 tcg_gen_or_i32(t0
, t0
, tmp
);
434 static inline void gen_load_fpr64(TCGv_i64 t
, int reg
)
436 tcg_gen_concat_i32_i64(t
, cpu_fregs
[reg
+ 1], cpu_fregs
[reg
]);
439 static inline void gen_store_fpr64 (TCGv_i64 t
, int reg
)
441 TCGv_i32 tmp
= tcg_temp_new_i32();
442 tcg_gen_trunc_i64_i32(tmp
, t
);
443 tcg_gen_mov_i32(cpu_fregs
[reg
+ 1], tmp
);
444 tcg_gen_shri_i64(t
, t
, 32);
445 tcg_gen_trunc_i64_i32(tmp
, t
);
446 tcg_gen_mov_i32(cpu_fregs
[reg
], tmp
);
447 tcg_temp_free_i32(tmp
);
/* Opcode bit-field extractors; all read the current opcode from ctx.
   The "s" variants sign-extend the field. */
#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

/* REG selects the active bank for R0-R7 (bank 1 when MD and RB are both
   set in the translation-time SR); ALTREG selects the inactive bank. */
#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
		(cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
		? (cpu_gregs[x + 16]) : (cpu_gregs[x]))

/* FPU register selection honouring the FPSCR.FR bank swap. */
#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
/* Guard macros used at the top of opcode translation cases; each raises
   the appropriate exception and returns from _decode_opc on violation.
   NOTE(review): the brace/return lines were elided in the garbled
   extraction and are reconstructed — confirm against the original. */
#define CHECK_NOT_DELAY_SLOT \
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
  {                                                           \
      gen_helper_raise_slot_illegal_instruction();            \
      ctx->bstate = BS_EXCP;                                  \
      return;                                                 \
  }

#define CHECK_PRIVILEGED                                        \
  if (IS_USER(ctx)) {                                           \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          gen_helper_raise_slot_illegal_instruction();          \
      } else {                                                  \
          gen_helper_raise_illegal_instruction();               \
      }                                                         \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }

#define CHECK_FPU_ENABLED                                       \
  if (ctx->flags & SR_FD) {                                     \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          gen_helper_raise_slot_fpu_disable();                  \
      } else {                                                  \
          gen_helper_raise_fpu_disable();                       \
      }                                                         \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }
501 static void _decode_opc(DisasContext
* ctx
)
503 /* This code tries to make movcal emulation sufficiently
504 accurate for Linux purposes. This instruction writes
505 memory, and prior to that, always allocates a cache line.
506 It is used in two contexts:
507 - in memcpy, where data is copied in blocks, the first write
508 of to a block uses movca.l for performance.
509 - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
510 to flush the cache. Here, the data written by movcal.l is never
511 written to memory, and the data written is just bogus.
513 To simulate this, we simulate movcal.l, we store the value to memory,
514 but we also remember the previous content. If we see ocbi, we check
515 if movcal.l for that address was done previously. If so, the write should
516 not have hit the memory, so we restore the previous content.
517 When we see an instruction that is neither movca.l
518 nor ocbi, the previous content is discarded.
520 To optimize, we only try to flush stores when we're at the start of
521 TB, or if we already saw movca.l in this TB and did not flush stores
525 int opcode
= ctx
->opcode
& 0xf0ff;
526 if (opcode
!= 0x0093 /* ocbi */
527 && opcode
!= 0x00c3 /* movca.l */)
529 gen_helper_discard_movcal_backup ();
535 fprintf(stderr
, "Translating opcode 0x%04x\n", ctx
->opcode
);
538 switch (ctx
->opcode
) {
539 case 0x0019: /* div0u */
540 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~(SR_M
| SR_Q
| SR_T
));
542 case 0x000b: /* rts */
544 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_pr
);
545 ctx
->flags
|= DELAY_SLOT
;
546 ctx
->delayed_pc
= (uint32_t) - 1;
548 case 0x0028: /* clrmac */
549 tcg_gen_movi_i32(cpu_mach
, 0);
550 tcg_gen_movi_i32(cpu_macl
, 0);
552 case 0x0048: /* clrs */
553 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_S
);
555 case 0x0008: /* clrt */
558 case 0x0038: /* ldtlb */
562 case 0x002b: /* rte */
565 tcg_gen_mov_i32(cpu_sr
, cpu_ssr
);
566 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_spc
);
567 ctx
->flags
|= DELAY_SLOT
;
568 ctx
->delayed_pc
= (uint32_t) - 1;
570 case 0x0058: /* sets */
571 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_S
);
573 case 0x0018: /* sett */
576 case 0xfbfd: /* frchg */
577 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_FR
);
578 ctx
->bstate
= BS_STOP
;
580 case 0xf3fd: /* fschg */
581 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_SZ
);
582 ctx
->bstate
= BS_STOP
;
584 case 0x0009: /* nop */
586 case 0x001b: /* sleep */
588 gen_helper_sleep(tcg_const_i32(ctx
->pc
+ 2));
592 switch (ctx
->opcode
& 0xf000) {
593 case 0x1000: /* mov.l Rm,@(disp,Rn) */
595 TCGv addr
= tcg_temp_new();
596 tcg_gen_addi_i32(addr
, REG(B11_8
), B3_0
* 4);
597 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
601 case 0x5000: /* mov.l @(disp,Rm),Rn */
603 TCGv addr
= tcg_temp_new();
604 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 4);
605 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
609 case 0xe000: /* mov #imm,Rn */
610 tcg_gen_movi_i32(REG(B11_8
), B7_0s
);
612 case 0x9000: /* mov.w @(disp,PC),Rn */
614 TCGv addr
= tcg_const_i32(ctx
->pc
+ 4 + B7_0
* 2);
615 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
619 case 0xd000: /* mov.l @(disp,PC),Rn */
621 TCGv addr
= tcg_const_i32((ctx
->pc
+ 4 + B7_0
* 4) & ~3);
622 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
626 case 0x7000: /* add #imm,Rn */
627 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), B7_0s
);
629 case 0xa000: /* bra disp */
631 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
632 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
633 ctx
->flags
|= DELAY_SLOT
;
635 case 0xb000: /* bsr disp */
637 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
638 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
639 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
640 ctx
->flags
|= DELAY_SLOT
;
644 switch (ctx
->opcode
& 0xf00f) {
645 case 0x6003: /* mov Rm,Rn */
646 tcg_gen_mov_i32(REG(B11_8
), REG(B7_4
));
648 case 0x2000: /* mov.b Rm,@Rn */
649 tcg_gen_qemu_st8(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
651 case 0x2001: /* mov.w Rm,@Rn */
652 tcg_gen_qemu_st16(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
654 case 0x2002: /* mov.l Rm,@Rn */
655 tcg_gen_qemu_st32(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
657 case 0x6000: /* mov.b @Rm,Rn */
658 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
660 case 0x6001: /* mov.w @Rm,Rn */
661 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
663 case 0x6002: /* mov.l @Rm,Rn */
664 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
666 case 0x2004: /* mov.b Rm,@-Rn */
668 TCGv addr
= tcg_temp_new();
669 tcg_gen_subi_i32(addr
, REG(B11_8
), 1);
670 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
); /* might cause re-execution */
671 tcg_gen_mov_i32(REG(B11_8
), addr
); /* modify register status */
675 case 0x2005: /* mov.w Rm,@-Rn */
677 TCGv addr
= tcg_temp_new();
678 tcg_gen_subi_i32(addr
, REG(B11_8
), 2);
679 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
680 tcg_gen_mov_i32(REG(B11_8
), addr
);
684 case 0x2006: /* mov.l Rm,@-Rn */
686 TCGv addr
= tcg_temp_new();
687 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
688 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
689 tcg_gen_mov_i32(REG(B11_8
), addr
);
692 case 0x6004: /* mov.b @Rm+,Rn */
693 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
695 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 1);
697 case 0x6005: /* mov.w @Rm+,Rn */
698 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
700 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
702 case 0x6006: /* mov.l @Rm+,Rn */
703 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
705 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
707 case 0x0004: /* mov.b Rm,@(R0,Rn) */
709 TCGv addr
= tcg_temp_new();
710 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
711 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
);
715 case 0x0005: /* mov.w Rm,@(R0,Rn) */
717 TCGv addr
= tcg_temp_new();
718 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
719 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
723 case 0x0006: /* mov.l Rm,@(R0,Rn) */
725 TCGv addr
= tcg_temp_new();
726 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
727 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
731 case 0x000c: /* mov.b @(R0,Rm),Rn */
733 TCGv addr
= tcg_temp_new();
734 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
735 tcg_gen_qemu_ld8s(REG(B11_8
), addr
, ctx
->memidx
);
739 case 0x000d: /* mov.w @(R0,Rm),Rn */
741 TCGv addr
= tcg_temp_new();
742 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
743 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
747 case 0x000e: /* mov.l @(R0,Rm),Rn */
749 TCGv addr
= tcg_temp_new();
750 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
751 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
755 case 0x6008: /* swap.b Rm,Rn */
758 high
= tcg_temp_new();
759 tcg_gen_andi_i32(high
, REG(B7_4
), 0xffff0000);
760 low
= tcg_temp_new();
761 tcg_gen_ext16u_i32(low
, REG(B7_4
));
762 tcg_gen_bswap16_i32(low
, low
);
763 tcg_gen_or_i32(REG(B11_8
), high
, low
);
768 case 0x6009: /* swap.w Rm,Rn */
771 high
= tcg_temp_new();
772 tcg_gen_shli_i32(high
, REG(B7_4
), 16);
773 low
= tcg_temp_new();
774 tcg_gen_shri_i32(low
, REG(B7_4
), 16);
775 tcg_gen_ext16u_i32(low
, low
);
776 tcg_gen_or_i32(REG(B11_8
), high
, low
);
781 case 0x200d: /* xtrct Rm,Rn */
784 high
= tcg_temp_new();
785 tcg_gen_shli_i32(high
, REG(B7_4
), 16);
786 low
= tcg_temp_new();
787 tcg_gen_shri_i32(low
, REG(B11_8
), 16);
788 tcg_gen_ext16u_i32(low
, low
);
789 tcg_gen_or_i32(REG(B11_8
), high
, low
);
794 case 0x300c: /* add Rm,Rn */
795 tcg_gen_add_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
797 case 0x300e: /* addc Rm,Rn */
798 gen_helper_addc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
800 case 0x300f: /* addv Rm,Rn */
801 gen_helper_addv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
803 case 0x2009: /* and Rm,Rn */
804 tcg_gen_and_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
806 case 0x3000: /* cmp/eq Rm,Rn */
807 gen_cmp(TCG_COND_EQ
, REG(B7_4
), REG(B11_8
));
809 case 0x3003: /* cmp/ge Rm,Rn */
810 gen_cmp(TCG_COND_GE
, REG(B7_4
), REG(B11_8
));
812 case 0x3007: /* cmp/gt Rm,Rn */
813 gen_cmp(TCG_COND_GT
, REG(B7_4
), REG(B11_8
));
815 case 0x3006: /* cmp/hi Rm,Rn */
816 gen_cmp(TCG_COND_GTU
, REG(B7_4
), REG(B11_8
));
818 case 0x3002: /* cmp/hs Rm,Rn */
819 gen_cmp(TCG_COND_GEU
, REG(B7_4
), REG(B11_8
));
821 case 0x200c: /* cmp/str Rm,Rn */
823 int label1
= gen_new_label();
824 int label2
= gen_new_label();
825 TCGv cmp1
= tcg_temp_local_new();
826 TCGv cmp2
= tcg_temp_local_new();
827 tcg_gen_xor_i32(cmp1
, REG(B7_4
), REG(B11_8
));
828 tcg_gen_andi_i32(cmp2
, cmp1
, 0xff000000);
829 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
830 tcg_gen_andi_i32(cmp2
, cmp1
, 0x00ff0000);
831 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
832 tcg_gen_andi_i32(cmp2
, cmp1
, 0x0000ff00);
833 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
834 tcg_gen_andi_i32(cmp2
, cmp1
, 0x000000ff);
835 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
836 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
838 gen_set_label(label1
);
839 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
840 gen_set_label(label2
);
845 case 0x2007: /* div0s Rm,Rn */
847 gen_copy_bit_i32(cpu_sr
, 8, REG(B11_8
), 31); /* SR_Q */
848 gen_copy_bit_i32(cpu_sr
, 9, REG(B7_4
), 31); /* SR_M */
849 TCGv val
= tcg_temp_new();
850 tcg_gen_xor_i32(val
, REG(B7_4
), REG(B11_8
));
851 gen_copy_bit_i32(cpu_sr
, 0, val
, 31); /* SR_T */
855 case 0x3004: /* div1 Rm,Rn */
856 gen_helper_div1(REG(B11_8
), REG(B7_4
), REG(B11_8
));
858 case 0x300d: /* dmuls.l Rm,Rn */
860 TCGv_i64 tmp1
= tcg_temp_new_i64();
861 TCGv_i64 tmp2
= tcg_temp_new_i64();
863 tcg_gen_ext_i32_i64(tmp1
, REG(B7_4
));
864 tcg_gen_ext_i32_i64(tmp2
, REG(B11_8
));
865 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
866 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
867 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
868 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
870 tcg_temp_free_i64(tmp2
);
871 tcg_temp_free_i64(tmp1
);
874 case 0x3005: /* dmulu.l Rm,Rn */
876 TCGv_i64 tmp1
= tcg_temp_new_i64();
877 TCGv_i64 tmp2
= tcg_temp_new_i64();
879 tcg_gen_extu_i32_i64(tmp1
, REG(B7_4
));
880 tcg_gen_extu_i32_i64(tmp2
, REG(B11_8
));
881 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
882 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
883 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
884 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
886 tcg_temp_free_i64(tmp2
);
887 tcg_temp_free_i64(tmp1
);
890 case 0x600e: /* exts.b Rm,Rn */
891 tcg_gen_ext8s_i32(REG(B11_8
), REG(B7_4
));
893 case 0x600f: /* exts.w Rm,Rn */
894 tcg_gen_ext16s_i32(REG(B11_8
), REG(B7_4
));
896 case 0x600c: /* extu.b Rm,Rn */
897 tcg_gen_ext8u_i32(REG(B11_8
), REG(B7_4
));
899 case 0x600d: /* extu.w Rm,Rn */
900 tcg_gen_ext16u_i32(REG(B11_8
), REG(B7_4
));
902 case 0x000f: /* mac.l @Rm+,@Rn+ */
905 arg0
= tcg_temp_new();
906 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
907 arg1
= tcg_temp_new();
908 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
909 gen_helper_macl(arg0
, arg1
);
912 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
913 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
916 case 0x400f: /* mac.w @Rm+,@Rn+ */
919 arg0
= tcg_temp_new();
920 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
921 arg1
= tcg_temp_new();
922 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
923 gen_helper_macw(arg0
, arg1
);
926 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 2);
927 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
930 case 0x0007: /* mul.l Rm,Rn */
931 tcg_gen_mul_i32(cpu_macl
, REG(B7_4
), REG(B11_8
));
933 case 0x200f: /* muls.w Rm,Rn */
936 arg0
= tcg_temp_new();
937 tcg_gen_ext16s_i32(arg0
, REG(B7_4
));
938 arg1
= tcg_temp_new();
939 tcg_gen_ext16s_i32(arg1
, REG(B11_8
));
940 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
945 case 0x200e: /* mulu.w Rm,Rn */
948 arg0
= tcg_temp_new();
949 tcg_gen_ext16u_i32(arg0
, REG(B7_4
));
950 arg1
= tcg_temp_new();
951 tcg_gen_ext16u_i32(arg1
, REG(B11_8
));
952 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
957 case 0x600b: /* neg Rm,Rn */
958 tcg_gen_neg_i32(REG(B11_8
), REG(B7_4
));
960 case 0x600a: /* negc Rm,Rn */
961 gen_helper_negc(REG(B11_8
), REG(B7_4
));
963 case 0x6007: /* not Rm,Rn */
964 tcg_gen_not_i32(REG(B11_8
), REG(B7_4
));
966 case 0x200b: /* or Rm,Rn */
967 tcg_gen_or_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
969 case 0x400c: /* shad Rm,Rn */
971 int label1
= gen_new_label();
972 int label2
= gen_new_label();
973 int label3
= gen_new_label();
974 int label4
= gen_new_label();
976 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
977 /* Rm positive, shift to the left */
978 shift
= tcg_temp_new();
979 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
980 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
981 tcg_temp_free(shift
);
983 /* Rm negative, shift to the right */
984 gen_set_label(label1
);
985 shift
= tcg_temp_new();
986 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
987 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
988 tcg_gen_not_i32(shift
, REG(B7_4
));
989 tcg_gen_andi_i32(shift
, shift
, 0x1f);
990 tcg_gen_addi_i32(shift
, shift
, 1);
991 tcg_gen_sar_i32(REG(B11_8
), REG(B11_8
), shift
);
992 tcg_temp_free(shift
);
995 gen_set_label(label2
);
996 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B11_8
), 0, label3
);
997 tcg_gen_movi_i32(REG(B11_8
), 0);
999 gen_set_label(label3
);
1000 tcg_gen_movi_i32(REG(B11_8
), 0xffffffff);
1001 gen_set_label(label4
);
1004 case 0x400d: /* shld Rm,Rn */
1006 int label1
= gen_new_label();
1007 int label2
= gen_new_label();
1008 int label3
= gen_new_label();
1010 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
1011 /* Rm positive, shift to the left */
1012 shift
= tcg_temp_new();
1013 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1014 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
1015 tcg_temp_free(shift
);
1017 /* Rm negative, shift to the right */
1018 gen_set_label(label1
);
1019 shift
= tcg_temp_new();
1020 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1021 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
1022 tcg_gen_not_i32(shift
, REG(B7_4
));
1023 tcg_gen_andi_i32(shift
, shift
, 0x1f);
1024 tcg_gen_addi_i32(shift
, shift
, 1);
1025 tcg_gen_shr_i32(REG(B11_8
), REG(B11_8
), shift
);
1026 tcg_temp_free(shift
);
1029 gen_set_label(label2
);
1030 tcg_gen_movi_i32(REG(B11_8
), 0);
1031 gen_set_label(label3
);
1034 case 0x3008: /* sub Rm,Rn */
1035 tcg_gen_sub_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1037 case 0x300a: /* subc Rm,Rn */
1038 gen_helper_subc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1040 case 0x300b: /* subv Rm,Rn */
1041 gen_helper_subv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1043 case 0x2008: /* tst Rm,Rn */
1045 TCGv val
= tcg_temp_new();
1046 tcg_gen_and_i32(val
, REG(B7_4
), REG(B11_8
));
1047 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1051 case 0x200a: /* xor Rm,Rn */
1052 tcg_gen_xor_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1054 case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1056 if (ctx
->fpscr
& FPSCR_SZ
) {
1057 TCGv_i64 fp
= tcg_temp_new_i64();
1058 gen_load_fpr64(fp
, XREG(B7_4
));
1059 gen_store_fpr64(fp
, XREG(B11_8
));
1060 tcg_temp_free_i64(fp
);
1062 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1065 case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1067 if (ctx
->fpscr
& FPSCR_SZ
) {
1068 TCGv addr_hi
= tcg_temp_new();
1069 int fr
= XREG(B7_4
);
1070 tcg_gen_addi_i32(addr_hi
, REG(B11_8
), 4);
1071 tcg_gen_qemu_st32(cpu_fregs
[fr
], REG(B11_8
), ctx
->memidx
);
1072 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1073 tcg_temp_free(addr_hi
);
1075 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], REG(B11_8
), ctx
->memidx
);
1078 case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1080 if (ctx
->fpscr
& FPSCR_SZ
) {
1081 TCGv addr_hi
= tcg_temp_new();
1082 int fr
= XREG(B11_8
);
1083 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1084 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1085 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1086 tcg_temp_free(addr_hi
);
1088 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1091 case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1093 if (ctx
->fpscr
& FPSCR_SZ
) {
1094 TCGv addr_hi
= tcg_temp_new();
1095 int fr
= XREG(B11_8
);
1096 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1097 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1098 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1099 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 8);
1100 tcg_temp_free(addr_hi
);
1102 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1103 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
1106 case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1108 if (ctx
->fpscr
& FPSCR_SZ
) {
1109 TCGv addr
= tcg_temp_new_i32();
1110 int fr
= XREG(B7_4
);
1111 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1112 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1113 tcg_gen_subi_i32(addr
, addr
, 4);
1114 tcg_gen_qemu_st32(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1115 tcg_gen_mov_i32(REG(B11_8
), addr
);
1116 tcg_temp_free(addr
);
1119 addr
= tcg_temp_new_i32();
1120 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1121 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1122 tcg_gen_mov_i32(REG(B11_8
), addr
);
1123 tcg_temp_free(addr
);
1126 case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1129 TCGv addr
= tcg_temp_new_i32();
1130 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
1131 if (ctx
->fpscr
& FPSCR_SZ
) {
1132 int fr
= XREG(B11_8
);
1133 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1134 tcg_gen_addi_i32(addr
, addr
, 4);
1135 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1137 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], addr
, ctx
->memidx
);
1139 tcg_temp_free(addr
);
1142 case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
1145 TCGv addr
= tcg_temp_new();
1146 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
1147 if (ctx
->fpscr
& FPSCR_SZ
) {
1148 int fr
= XREG(B7_4
);
1149 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1150 tcg_gen_addi_i32(addr
, addr
, 4);
1151 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1153 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1155 tcg_temp_free(addr
);
1158 case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1159 case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1160 case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1161 case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1162 case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1163 case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1166 if (ctx
->fpscr
& FPSCR_PR
) {
1169 if (ctx
->opcode
& 0x0110)
1170 break; /* illegal instruction */
1171 fp0
= tcg_temp_new_i64();
1172 fp1
= tcg_temp_new_i64();
1173 gen_load_fpr64(fp0
, DREG(B11_8
));
1174 gen_load_fpr64(fp1
, DREG(B7_4
));
1175 switch (ctx
->opcode
& 0xf00f) {
1176 case 0xf000: /* fadd Rm,Rn */
1177 gen_helper_fadd_DT(fp0
, fp0
, fp1
);
1179 case 0xf001: /* fsub Rm,Rn */
1180 gen_helper_fsub_DT(fp0
, fp0
, fp1
);
1182 case 0xf002: /* fmul Rm,Rn */
1183 gen_helper_fmul_DT(fp0
, fp0
, fp1
);
1185 case 0xf003: /* fdiv Rm,Rn */
1186 gen_helper_fdiv_DT(fp0
, fp0
, fp1
);
1188 case 0xf004: /* fcmp/eq Rm,Rn */
1189 gen_helper_fcmp_eq_DT(fp0
, fp1
);
1191 case 0xf005: /* fcmp/gt Rm,Rn */
1192 gen_helper_fcmp_gt_DT(fp0
, fp1
);
1195 gen_store_fpr64(fp0
, DREG(B11_8
));
1196 tcg_temp_free_i64(fp0
);
1197 tcg_temp_free_i64(fp1
);
1199 switch (ctx
->opcode
& 0xf00f) {
1200 case 0xf000: /* fadd Rm,Rn */
1201 gen_helper_fadd_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1203 case 0xf001: /* fsub Rm,Rn */
1204 gen_helper_fsub_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1206 case 0xf002: /* fmul Rm,Rn */
1207 gen_helper_fmul_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1209 case 0xf003: /* fdiv Rm,Rn */
1210 gen_helper_fdiv_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1212 case 0xf004: /* fcmp/eq Rm,Rn */
1213 gen_helper_fcmp_eq_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1215 case 0xf005: /* fcmp/gt Rm,Rn */
1216 gen_helper_fcmp_gt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1222 case 0xf00e: /* fmac FR0,RM,Rn */
1225 if (ctx
->fpscr
& FPSCR_PR
) {
1226 break; /* illegal instruction */
1228 gen_helper_fmac_FT(cpu_fregs
[FREG(B11_8
)],
1229 cpu_fregs
[FREG(0)], cpu_fregs
[FREG(B7_4
)], cpu_fregs
[FREG(B11_8
)]);
1235 switch (ctx
->opcode
& 0xff00) {
1236 case 0xc900: /* and #imm,R0 */
1237 tcg_gen_andi_i32(REG(0), REG(0), B7_0
);
1239 case 0xcd00: /* and.b #imm,@(R0,GBR) */
1242 addr
= tcg_temp_new();
1243 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1244 val
= tcg_temp_new();
1245 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1246 tcg_gen_andi_i32(val
, val
, B7_0
);
1247 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1249 tcg_temp_free(addr
);
1252 case 0x8b00: /* bf label */
1253 CHECK_NOT_DELAY_SLOT
1254 gen_conditional_jump(ctx
, ctx
->pc
+ 2,
1255 ctx
->pc
+ 4 + B7_0s
* 2);
1256 ctx
->bstate
= BS_BRANCH
;
1258 case 0x8f00: /* bf/s label */
1259 CHECK_NOT_DELAY_SLOT
1260 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 0);
1261 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1263 case 0x8900: /* bt label */
1264 CHECK_NOT_DELAY_SLOT
1265 gen_conditional_jump(ctx
, ctx
->pc
+ 4 + B7_0s
* 2,
1267 ctx
->bstate
= BS_BRANCH
;
1269 case 0x8d00: /* bt/s label */
1270 CHECK_NOT_DELAY_SLOT
1271 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 1);
1272 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1274 case 0x8800: /* cmp/eq #imm,R0 */
1275 gen_cmp_imm(TCG_COND_EQ
, REG(0), B7_0s
);
1277 case 0xc400: /* mov.b @(disp,GBR),R0 */
1279 TCGv addr
= tcg_temp_new();
1280 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1281 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1282 tcg_temp_free(addr
);
1285 case 0xc500: /* mov.w @(disp,GBR),R0 */
1287 TCGv addr
= tcg_temp_new();
1288 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1289 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1290 tcg_temp_free(addr
);
1293 case 0xc600: /* mov.l @(disp,GBR),R0 */
1295 TCGv addr
= tcg_temp_new();
1296 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1297 tcg_gen_qemu_ld32s(REG(0), addr
, ctx
->memidx
);
1298 tcg_temp_free(addr
);
1301 case 0xc000: /* mov.b R0,@(disp,GBR) */
1303 TCGv addr
= tcg_temp_new();
1304 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1305 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1306 tcg_temp_free(addr
);
1309 case 0xc100: /* mov.w R0,@(disp,GBR) */
1311 TCGv addr
= tcg_temp_new();
1312 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1313 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1314 tcg_temp_free(addr
);
1317 case 0xc200: /* mov.l R0,@(disp,GBR) */
1319 TCGv addr
= tcg_temp_new();
1320 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1321 tcg_gen_qemu_st32(REG(0), addr
, ctx
->memidx
);
1322 tcg_temp_free(addr
);
1325 case 0x8000: /* mov.b R0,@(disp,Rn) */
1327 TCGv addr
= tcg_temp_new();
1328 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1329 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1330 tcg_temp_free(addr
);
1333 case 0x8100: /* mov.w R0,@(disp,Rn) */
1335 TCGv addr
= tcg_temp_new();
1336 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1337 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1338 tcg_temp_free(addr
);
1341 case 0x8400: /* mov.b @(disp,Rn),R0 */
1343 TCGv addr
= tcg_temp_new();
1344 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1345 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1346 tcg_temp_free(addr
);
1349 case 0x8500: /* mov.w @(disp,Rn),R0 */
1351 TCGv addr
= tcg_temp_new();
1352 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1353 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1354 tcg_temp_free(addr
);
1357 case 0xc700: /* mova @(disp,PC),R0 */
1358 tcg_gen_movi_i32(REG(0), ((ctx
->pc
& 0xfffffffc) + 4 + B7_0
* 4) & ~3);
1360 case 0xcb00: /* or #imm,R0 */
1361 tcg_gen_ori_i32(REG(0), REG(0), B7_0
);
1363 case 0xcf00: /* or.b #imm,@(R0,GBR) */
1366 addr
= tcg_temp_new();
1367 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1368 val
= tcg_temp_new();
1369 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1370 tcg_gen_ori_i32(val
, val
, B7_0
);
1371 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1373 tcg_temp_free(addr
);
1376 case 0xc300: /* trapa #imm */
1379 CHECK_NOT_DELAY_SLOT
1380 imm
= tcg_const_i32(B7_0
);
1381 gen_helper_trapa(imm
);
1383 ctx
->bstate
= BS_BRANCH
;
1386 case 0xc800: /* tst #imm,R0 */
1388 TCGv val
= tcg_temp_new();
1389 tcg_gen_andi_i32(val
, REG(0), B7_0
);
1390 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1394 case 0xcc00: /* tst.b #imm,@(R0,GBR) */
1396 TCGv val
= tcg_temp_new();
1397 tcg_gen_add_i32(val
, REG(0), cpu_gbr
);
1398 tcg_gen_qemu_ld8u(val
, val
, ctx
->memidx
);
1399 tcg_gen_andi_i32(val
, val
, B7_0
);
1400 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1404 case 0xca00: /* xor #imm,R0 */
1405 tcg_gen_xori_i32(REG(0), REG(0), B7_0
);
1407 case 0xce00: /* xor.b #imm,@(R0,GBR) */
1410 addr
= tcg_temp_new();
1411 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1412 val
= tcg_temp_new();
1413 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1414 tcg_gen_xori_i32(val
, val
, B7_0
);
1415 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1417 tcg_temp_free(addr
);
1422 switch (ctx
->opcode
& 0xf08f) {
1423 case 0x408e: /* ldc Rm,Rn_BANK */
1425 tcg_gen_mov_i32(ALTREG(B6_4
), REG(B11_8
));
1427 case 0x4087: /* ldc.l @Rm+,Rn_BANK */
1429 tcg_gen_qemu_ld32s(ALTREG(B6_4
), REG(B11_8
), ctx
->memidx
);
1430 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1432 case 0x0082: /* stc Rm_BANK,Rn */
1434 tcg_gen_mov_i32(REG(B11_8
), ALTREG(B6_4
));
1436 case 0x4083: /* stc.l Rm_BANK,@-Rn */
1439 TCGv addr
= tcg_temp_new();
1440 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1441 tcg_gen_qemu_st32(ALTREG(B6_4
), addr
, ctx
->memidx
);
1442 tcg_gen_mov_i32(REG(B11_8
), addr
);
1443 tcg_temp_free(addr
);
1448 switch (ctx
->opcode
& 0xf0ff) {
1449 case 0x0023: /* braf Rn */
1450 CHECK_NOT_DELAY_SLOT
1451 tcg_gen_addi_i32(cpu_delayed_pc
, REG(B11_8
), ctx
->pc
+ 4);
1452 ctx
->flags
|= DELAY_SLOT
;
1453 ctx
->delayed_pc
= (uint32_t) - 1;
1455 case 0x0003: /* bsrf Rn */
1456 CHECK_NOT_DELAY_SLOT
1457 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1458 tcg_gen_add_i32(cpu_delayed_pc
, REG(B11_8
), cpu_pr
);
1459 ctx
->flags
|= DELAY_SLOT
;
1460 ctx
->delayed_pc
= (uint32_t) - 1;
1462 case 0x4015: /* cmp/pl Rn */
1463 gen_cmp_imm(TCG_COND_GT
, REG(B11_8
), 0);
1465 case 0x4011: /* cmp/pz Rn */
1466 gen_cmp_imm(TCG_COND_GE
, REG(B11_8
), 0);
1468 case 0x4010: /* dt Rn */
1469 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 1);
1470 gen_cmp_imm(TCG_COND_EQ
, REG(B11_8
), 0);
1472 case 0x402b: /* jmp @Rn */
1473 CHECK_NOT_DELAY_SLOT
1474 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1475 ctx
->flags
|= DELAY_SLOT
;
1476 ctx
->delayed_pc
= (uint32_t) - 1;
1478 case 0x400b: /* jsr @Rn */
1479 CHECK_NOT_DELAY_SLOT
1480 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1481 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1482 ctx
->flags
|= DELAY_SLOT
;
1483 ctx
->delayed_pc
= (uint32_t) - 1;
1485 case 0x400e: /* ldc Rm,SR */
1487 tcg_gen_andi_i32(cpu_sr
, REG(B11_8
), 0x700083f3);
1488 ctx
->bstate
= BS_STOP
;
1490 case 0x4007: /* ldc.l @Rm+,SR */
1493 TCGv val
= tcg_temp_new();
1494 tcg_gen_qemu_ld32s(val
, REG(B11_8
), ctx
->memidx
);
1495 tcg_gen_andi_i32(cpu_sr
, val
, 0x700083f3);
1497 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1498 ctx
->bstate
= BS_STOP
;
1501 case 0x0002: /* stc SR,Rn */
1503 tcg_gen_mov_i32(REG(B11_8
), cpu_sr
);
1505 case 0x4003: /* stc SR,@-Rn */
1508 TCGv addr
= tcg_temp_new();
1509 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1510 tcg_gen_qemu_st32(cpu_sr
, addr
, ctx
->memidx
);
1511 tcg_gen_mov_i32(REG(B11_8
), addr
);
1512 tcg_temp_free(addr
);
1515 #define LD(reg,ldnum,ldpnum,prechk) \
1518 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1522 tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
1523 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1525 #define ST(reg,stnum,stpnum,prechk) \
1528 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1533 TCGv addr = tcg_temp_new(); \
1534 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1535 tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
1536 tcg_gen_mov_i32(REG(B11_8), addr); \
1537 tcg_temp_free(addr); \
1540 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1541 LD(reg,ldnum,ldpnum,prechk) \
1542 ST(reg,stnum,stpnum,prechk)
1543 LDST(gbr
, 0x401e, 0x4017, 0x0012, 0x4013, {})
1544 LDST(vbr
, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED
)
1545 LDST(ssr
, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED
)
1546 LDST(spc
, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED
)
1547 ST(sgr
, 0x003a, 0x4032, CHECK_PRIVILEGED
)
1548 LD(sgr
, 0x403a, 0x4036, CHECK_PRIVILEGED
if (!(ctx
->features
& SH_FEATURE_SH4A
)) break;)
1549 LDST(dbr
, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED
)
1550 LDST(mach
, 0x400a, 0x4006, 0x000a, 0x4002, {})
1551 LDST(macl
, 0x401a, 0x4016, 0x001a, 0x4012, {})
1552 LDST(pr
, 0x402a, 0x4026, 0x002a, 0x4022, {})
1553 LDST(fpul
, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED
})
1554 case 0x406a: /* lds Rm,FPSCR */
1556 gen_helper_ld_fpscr(REG(B11_8
));
1557 ctx
->bstate
= BS_STOP
;
1559 case 0x4066: /* lds.l @Rm+,FPSCR */
1562 TCGv addr
= tcg_temp_new();
1563 tcg_gen_qemu_ld32s(addr
, REG(B11_8
), ctx
->memidx
);
1564 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1565 gen_helper_ld_fpscr(addr
);
1566 tcg_temp_free(addr
);
1567 ctx
->bstate
= BS_STOP
;
1570 case 0x006a: /* sts FPSCR,Rn */
1572 tcg_gen_andi_i32(REG(B11_8
), cpu_fpscr
, 0x003fffff);
1574 case 0x4062: /* sts FPSCR,@-Rn */
1578 val
= tcg_temp_new();
1579 tcg_gen_andi_i32(val
, cpu_fpscr
, 0x003fffff);
1580 addr
= tcg_temp_new();
1581 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1582 tcg_gen_qemu_st32(val
, addr
, ctx
->memidx
);
1583 tcg_gen_mov_i32(REG(B11_8
), addr
);
1584 tcg_temp_free(addr
);
1588 case 0x00c3: /* movca.l R0,@Rm */
1590 TCGv val
= tcg_temp_new();
1591 tcg_gen_qemu_ld32u(val
, REG(B11_8
), ctx
->memidx
);
1592 gen_helper_movcal (REG(B11_8
), val
);
1593 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1595 ctx
->has_movcal
= 1;
1598 /* MOVUA.L @Rm,R0 (Rm) -> R0
1599 Load non-boundary-aligned data */
1600 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1603 /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
1604 Load non-boundary-aligned data */
1605 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1606 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1608 case 0x0029: /* movt Rn */
1609 tcg_gen_andi_i32(REG(B11_8
), cpu_sr
, SR_T
);
1614 If (T == 1) R0 -> (Rn)
1617 if (ctx
->features
& SH_FEATURE_SH4A
) {
1618 int label
= gen_new_label();
1620 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cpu_ldst
);
1621 tcg_gen_brcondi_i32(TCG_COND_EQ
, cpu_ldst
, 0, label
);
1622 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1623 gen_set_label(label
);
1624 tcg_gen_movi_i32(cpu_ldst
, 0);
1632 When interrupt/exception
1635 if (ctx
->features
& SH_FEATURE_SH4A
) {
1636 tcg_gen_movi_i32(cpu_ldst
, 0);
1637 tcg_gen_qemu_ld32s(REG(0), REG(B11_8
), ctx
->memidx
);
1638 tcg_gen_movi_i32(cpu_ldst
, 1);
1642 case 0x0093: /* ocbi @Rn */
1644 gen_helper_ocbi (REG(B11_8
));
1647 case 0x00a3: /* ocbp @Rn */
1649 TCGv dummy
= tcg_temp_new();
1650 tcg_gen_qemu_ld32s(dummy
, REG(B11_8
), ctx
->memidx
);
1651 tcg_temp_free(dummy
);
1654 case 0x00b3: /* ocbwb @Rn */
1656 TCGv dummy
= tcg_temp_new();
1657 tcg_gen_qemu_ld32s(dummy
, REG(B11_8
), ctx
->memidx
);
1658 tcg_temp_free(dummy
);
1661 case 0x0083: /* pref @Rn */
1663 case 0x00d3: /* prefi @Rn */
1664 if (ctx
->features
& SH_FEATURE_SH4A
)
1668 case 0x00e3: /* icbi @Rn */
1669 if (ctx
->features
& SH_FEATURE_SH4A
)
1673 case 0x00ab: /* synco */
1674 if (ctx
->features
& SH_FEATURE_SH4A
)
1678 case 0x4024: /* rotcl Rn */
1680 TCGv tmp
= tcg_temp_new();
1681 tcg_gen_mov_i32(tmp
, cpu_sr
);
1682 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1683 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1684 gen_copy_bit_i32(REG(B11_8
), 0, tmp
, 0);
1688 case 0x4025: /* rotcr Rn */
1690 TCGv tmp
= tcg_temp_new();
1691 tcg_gen_mov_i32(tmp
, cpu_sr
);
1692 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1693 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1694 gen_copy_bit_i32(REG(B11_8
), 31, tmp
, 0);
1698 case 0x4004: /* rotl Rn */
1699 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1700 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1701 gen_copy_bit_i32(REG(B11_8
), 0, cpu_sr
, 0);
1703 case 0x4005: /* rotr Rn */
1704 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1705 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1706 gen_copy_bit_i32(REG(B11_8
), 31, cpu_sr
, 0);
1708 case 0x4000: /* shll Rn */
1709 case 0x4020: /* shal Rn */
1710 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1711 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1713 case 0x4021: /* shar Rn */
1714 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1715 tcg_gen_sari_i32(REG(B11_8
), REG(B11_8
), 1);
1717 case 0x4001: /* shlr Rn */
1718 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1719 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1721 case 0x4008: /* shll2 Rn */
1722 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 2);
1724 case 0x4018: /* shll8 Rn */
1725 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 8);
1727 case 0x4028: /* shll16 Rn */
1728 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 16);
1730 case 0x4009: /* shlr2 Rn */
1731 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 2);
1733 case 0x4019: /* shlr8 Rn */
1734 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 8);
1736 case 0x4029: /* shlr16 Rn */
1737 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 16);
1739 case 0x401b: /* tas.b @Rn */
1742 addr
= tcg_temp_local_new();
1743 tcg_gen_mov_i32(addr
, REG(B11_8
));
1744 val
= tcg_temp_local_new();
1745 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1746 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1747 tcg_gen_ori_i32(val
, val
, 0x80);
1748 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1750 tcg_temp_free(addr
);
1753 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1755 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1757 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1759 tcg_gen_mov_i32(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1761 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1763 if (ctx
->fpscr
& FPSCR_PR
) {
1765 if (ctx
->opcode
& 0x0100)
1766 break; /* illegal instruction */
1767 fp
= tcg_temp_new_i64();
1768 gen_helper_float_DT(fp
, cpu_fpul
);
1769 gen_store_fpr64(fp
, DREG(B11_8
));
1770 tcg_temp_free_i64(fp
);
1773 gen_helper_float_FT(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1776 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1778 if (ctx
->fpscr
& FPSCR_PR
) {
1780 if (ctx
->opcode
& 0x0100)
1781 break; /* illegal instruction */
1782 fp
= tcg_temp_new_i64();
1783 gen_load_fpr64(fp
, DREG(B11_8
));
1784 gen_helper_ftrc_DT(cpu_fpul
, fp
);
1785 tcg_temp_free_i64(fp
);
1788 gen_helper_ftrc_FT(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1791 case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1794 gen_helper_fneg_T(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1797 case 0xf05d: /* fabs FRn/DRn */
1799 if (ctx
->fpscr
& FPSCR_PR
) {
1800 if (ctx
->opcode
& 0x0100)
1801 break; /* illegal instruction */
1802 TCGv_i64 fp
= tcg_temp_new_i64();
1803 gen_load_fpr64(fp
, DREG(B11_8
));
1804 gen_helper_fabs_DT(fp
, fp
);
1805 gen_store_fpr64(fp
, DREG(B11_8
));
1806 tcg_temp_free_i64(fp
);
1808 gen_helper_fabs_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1811 case 0xf06d: /* fsqrt FRn */
1813 if (ctx
->fpscr
& FPSCR_PR
) {
1814 if (ctx
->opcode
& 0x0100)
1815 break; /* illegal instruction */
1816 TCGv_i64 fp
= tcg_temp_new_i64();
1817 gen_load_fpr64(fp
, DREG(B11_8
));
1818 gen_helper_fsqrt_DT(fp
, fp
);
1819 gen_store_fpr64(fp
, DREG(B11_8
));
1820 tcg_temp_free_i64(fp
);
1822 gen_helper_fsqrt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1825 case 0xf07d: /* fsrra FRn */
1828 case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1830 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1831 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0);
1834 case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1836 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1837 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0x3f800000);
1840 case 0xf0ad: /* fcnvsd FPUL,DRn */
1843 TCGv_i64 fp
= tcg_temp_new_i64();
1844 gen_helper_fcnvsd_FT_DT(fp
, cpu_fpul
);
1845 gen_store_fpr64(fp
, DREG(B11_8
));
1846 tcg_temp_free_i64(fp
);
1849 case 0xf0bd: /* fcnvds DRn,FPUL */
1852 TCGv_i64 fp
= tcg_temp_new_i64();
1853 gen_load_fpr64(fp
, DREG(B11_8
));
1854 gen_helper_fcnvds_DT_FT(cpu_fpul
, fp
);
1855 tcg_temp_free_i64(fp
);
1858 case 0xf0ed: /* fipr FVm,FVn */
1860 if ((ctx
->fpscr
& FPSCR_PR
) == 0) {
1862 m
= tcg_const_i32((ctx
->opcode
>> 16) & 3);
1863 n
= tcg_const_i32((ctx
->opcode
>> 18) & 3);
1864 gen_helper_fipr(m
, n
);
1870 case 0xf0fd: /* ftrv XMTRX,FVn */
1872 if ((ctx
->opcode
& 0x0300) == 0x0100 &&
1873 (ctx
->fpscr
& FPSCR_PR
) == 0) {
1875 n
= tcg_const_i32((ctx
->opcode
>> 18) & 3);
1883 fprintf(stderr
, "unknown instruction 0x%04x at pc 0x%08x\n",
1884 ctx
->opcode
, ctx
->pc
);
1887 if (ctx
->flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
)) {
1888 gen_helper_raise_slot_illegal_instruction();
1890 gen_helper_raise_illegal_instruction();
1892 ctx
->bstate
= BS_EXCP
;
/* decode_opc: translate one guest instruction and perform the SH4
 * delay-slot bookkeeping around it.
 * NOTE(review): this extraction is lossy — several original lines
 * (braces, the call into the opcode decoder, and at least one branch
 * arm around line 1915) are missing — so the comments below describe
 * only what the visible fragments establish.  Verify against the
 * complete file before relying on them. */
1895 static void decode_opc(DisasContext
* ctx
)
/* Snapshot the delay-slot flags that were in force BEFORE this insn
 * was decoded; the decoder itself may modify ctx->flags. */
1897 uint32_t old_flags
= ctx
->flags
;
/* If the insn just decoded sat in a delay slot, the pending branch
 * must be emitted now. */
1901 if (old_flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
)) {
1902 if (ctx
->flags
& DELAY_SLOT_CLEARME
) {
1905 /* go out of the delay slot */
1906 uint32_t new_flags
= ctx
->flags
;
1907 new_flags
&= ~(DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
);
/* Persist the cleared flags so an interrupted TB restarts correctly. */
1908 gen_store_flags(new_flags
);
/* Leaving a delay slot always ends the translation block. */
1911 ctx
->bstate
= BS_BRANCH
;
/* Conditional delayed branches need a runtime test; unconditional
 * ones jump directly.  NOTE(review): the unconditional-branch body
 * (presumably a gen_jump(ctx) call) is missing from this extraction. */
1912 if (old_flags
& DELAY_SLOT_CONDITIONAL
) {
1913 gen_delayed_conditional_jump(ctx
);
1914 } else if (old_flags
& DELAY_SLOT
) {
1920 /* go into a delay slot */
/* If the decoder set delay-slot flags for the NEXT insn, store them
 * so they survive an exception taken inside the slot. */
1921 if (ctx
->flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
))
1922 gen_store_flags(ctx
->flags
);
/* gen_intermediate_code_internal: main translation loop — turns a run of
 * guest SH4 instructions starting at the TB's pc into TCG ops, stopping at
 * a branch/exception (ctx.bstate != BS_NONE), a page boundary, the opcode
 * buffer limit, or the icount budget.
 * NOTE(review): the extraction is lossy.  The signature's first line
 * (storage class / return type and the third parameter, presumably an
 * 'int search_pc' flag) and many body lines are missing; comments below
 * cover only the visible fragments. */
1926 gen_intermediate_code_internal(CPUState
* env
, TranslationBlock
* tb
,
1930 target_ulong pc_start
;
1931 static uint16_t *gen_opc_end
;
/* End sentinel for the shared TCG opcode buffer. */
1938 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
/* Seed the disassembly context from the TB and CPU state. */
1940 ctx
.flags
= (uint32_t)tb
->flags
;
1941 ctx
.bstate
= BS_NONE
;
1943 ctx
.fpscr
= env
->fpscr
;
/* memidx selects the MMU index: 1 for user mode (SR.MD clear), 0 for
 * privileged mode. */
1944 ctx
.memidx
= (env
->sr
& SR_MD
) == 0 ? 1 : 0;
1945 /* We don't know if the delayed pc came from a dynamic or static branch,
1946 so assume it is a dynamic branch. */
1947 ctx
.delayed_pc
= -1; /* use delayed pc from env pointer */
1949 ctx
.singlestep_enabled
= env
->singlestep_enabled
;
1950 ctx
.features
= env
->features
;
1951 ctx
.has_movcal
= (tb
->flags
& TB_FLAG_PENDING_MOVCA
);
/* Respect the icount budget recorded in the TB's cflags; a zero count
 * means "no limit" (the mask itself). */
1955 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
1957 max_insns
= CF_COUNT_MASK
;
/* Translate until a branch/exception ends the block or the opcode
 * buffer fills up. */
1959 while (ctx
.bstate
== BS_NONE
&& gen_opc_ptr
< gen_opc_end
) {
1960 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
1961 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
1962 if (ctx
.pc
== bp
->pc
) {
1963 /* We have hit a breakpoint - make sure PC is up-to-date */
1964 tcg_gen_movi_i32(cpu_pc
, ctx
.pc
);
1966 ctx
.bstate
= BS_EXCP
;
/* search_pc bookkeeping: record per-op pc/flags/icount so a host pc
 * can later be mapped back to a guest pc (see gen_pc_load). */
1972 i
= gen_opc_ptr
- gen_opc_buf
;
1976 gen_opc_instr_start
[ii
++] = 0;
1978 gen_opc_pc
[ii
] = ctx
.pc
;
1979 gen_opc_hflags
[ii
] = ctx
.flags
;
1980 gen_opc_instr_start
[ii
] = 1;
1981 gen_opc_icount
[ii
] = num_insns
;
/* The last insn of an icount-limited TB may do I/O; it needs special
 * start/stop bracketing. */
1983 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
1986 fprintf(stderr
, "Loading opcode at address 0x%08x\n", ctx
.pc
);
/* Fetch the 16-bit SH4 opcode and decode it. */
1989 ctx
.opcode
= lduw_code(ctx
.pc
);
/* Stop conditions: page boundary, single-stepping, icount budget. */
1993 if ((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
1995 if (env
->singlestep_enabled
)
1997 if (num_insns
>= max_insns
)
2002 if (tb
->cflags
& CF_LAST_IO
)
/* Single-step: flush the pc and raise the debug exception. */
2004 if (env
->singlestep_enabled
) {
2005 tcg_gen_movi_i32(cpu_pc
, ctx
.pc
);
/* Epilogue: depending on why translation stopped, either the branch
 * already wrote pc (BS_BRANCH/BS_EXCP) or we must chain to the next pc. */
2008 switch (ctx
.bstate
) {
2010 /* gen_op_interrupt_restart(); */
2014 gen_store_flags(ctx
.flags
| DELAY_SLOT_CLEARME
);
2016 gen_goto_tb(&ctx
, 0, ctx
.pc
);
2019 /* gen_op_interrupt_restart(); */
2028 gen_icount_end(tb
, num_insns
);
2029 *gen_opc_ptr
= INDEX_op_end
;
/* search_pc mode: zero-fill the tail of the instr_start map. */
2031 i
= gen_opc_ptr
- gen_opc_buf
;
2034 gen_opc_instr_start
[ii
++] = 0;
2036 tb
->size
= ctx
.pc
- pc_start
;
2037 tb
->icount
= num_insns
;
/* Optional disassembly dump of the guest code just translated. */
2041 #ifdef SH4_DEBUG_DISAS
2042 qemu_log_mask(CPU_LOG_TB_IN_ASM
, "\n");
2044 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
2045 qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
2046 log_target_disas(pc_start
, ctx
.pc
- pc_start
, 0);
2052 void gen_intermediate_code(CPUState
* env
, struct TranslationBlock
*tb
)
2054 gen_intermediate_code_internal(env
, tb
, 0);
2057 void gen_intermediate_code_pc(CPUState
* env
, struct TranslationBlock
*tb
)
2059 gen_intermediate_code_internal(env
, tb
, 1);
2062 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
2063 unsigned long searched_pc
, int pc_pos
, void *puc
)
2065 env
->pc
= gen_opc_pc
[pc_pos
];
2066 env
->flags
= gen_opc_hflags
[pc_pos
];