/*
 * Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define SH4_DEBUG_DISAS
//#define SH4_SINGLE_STEP
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc;
    uint32_t sr;
    uint32_t fpscr;
    uint16_t opcode;
    uint32_t flags;
    int bstate;
    int memidx;
    uint32_t delayed_pc;
    int singlestep_enabled;
    uint32_t features;
    int has_movcal;
} DisasContext;
#if defined(CONFIG_USER_ONLY)
#define IS_USER(ctx) 1
#else
#define IS_USER(ctx) (!(ctx->sr & SR_MD))
#endif
enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                      exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
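
/* Note: bstate decides how the translation loop ends a TB.  BS_STOP and
   BS_NONE make gen_intermediate_code_internal write back cpu_pc and chain to
   the next TB, BS_EXCP exits directly; this is why instructions that change
   SR or FPSCR below set ctx->bstate = BS_STOP. */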
/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_gregs[24];
static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
static TCGv cpu_fregs[32];

/* internal register indexes */
static TCGv cpu_flags, cpu_delayed_pc;
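
/* cpu_flags and cpu_delayed_pc shadow env->flags and env->delayed_pc so that
   delay-slot state survives across a TB boundary; they are bookkeeping values,
   not architectural guest registers. */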

static uint32_t gen_opc_hflags[OPC_BUF_SIZE];

#include "gen-icount.h"
static void sh4_translate_init(void)
{
    int i;
    static int done_init = 0;
    static const char * const gregnames[24] = {
        "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
        "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
        "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
        "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
        "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
    };
    static const char * const fregnames[32] = {
        "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
        "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
        "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
        "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
        "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
        "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
        "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
        "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
    };

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    for (i = 0; i < 24; i++)
        cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                              offsetof(CPUSH4State, gregs[i]),
                                              gregnames[i]);

    cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUSH4State, pc), "PC");
    cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUSH4State, sr), "SR");
    cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUSH4State, ssr), "SSR");
    cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUSH4State, spc), "SPC");
    cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUSH4State, gbr), "GBR");
    cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUSH4State, vbr), "VBR");
    cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUSH4State, sgr), "SGR");
    cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
                                     offsetof(CPUSH4State, dbr), "DBR");
    cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUSH4State, mach), "MACH");
    cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUSH4State, macl), "MACL");
    cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUSH4State, pr), "PR");
    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUSH4State, fpscr), "FPSCR");
    cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUSH4State, fpul), "FPUL");

    cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUSH4State, flags), "_flags_");
    cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUSH4State, delayed_pc),
                                            "_delayed_pc_");
    cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUSH4State, ldst), "_ldst_");

    for (i = 0; i < 32; i++)
        cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                              offsetof(CPUSH4State, fregs[i]),
                                              fregnames[i]);

    /* register helpers */

    done_init = 1;
}
void cpu_dump_state(CPUSH4State * env, FILE * f,
                    int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
                    int flags)
{
    int i;

    cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
                env->pc, env->sr, env->pr, env->fpscr);
    cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
                env->spc, env->ssr, env->gbr, env->vbr);
    cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
                env->sgr, env->dbr, env->delayed_pc, env->fpul);
    for (i = 0; i < 24; i += 4) {
        cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
                    i, env->gregs[i], i + 1, env->gregs[i + 1],
                    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
    }
    if (env->flags & DELAY_SLOT) {
        cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
        cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
                    env->delayed_pc);
    }
}
void cpu_state_reset(CPUSH4State *env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }

    memset(env, 0, offsetof(CPUSH4State, breakpoints));

    env->pc = 0xA0000000;
#if defined(CONFIG_USER_ONLY)
    env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
    set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
#else
    env->sr = SR_MD | SR_RB | SR_BL | SR_I3 | SR_I2 | SR_I1 | SR_I0;
    env->fpscr = FPSCR_DN | FPSCR_RM_ZERO; /* CPU reset value according to SH4 manual */
    set_float_rounding_mode(float_round_to_zero, &env->fp_status);
    set_flush_to_zero(1, &env->fp_status);
#endif
    set_default_nan_mode(1, &env->fp_status);
}
static sh4_def_t sh4_defs[] = {
    {
        .id = SH_CPU_SH7750R,
        .features = SH_FEATURE_BCR3_AND_BCR4,
    }, {
        .id = SH_CPU_SH7751R,
        .cvr = 0x00110000, /* Neutered caches, should be 0x20480000 */
        .features = SH_FEATURE_BCR3_AND_BCR4,
    }, {
        .features = SH_FEATURE_SH4A,
    },
};
static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
{
    int i;

    if (strcasecmp(name, "any") == 0)
        return &sh4_defs[0];
    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
        if (strcasecmp(name, sh4_defs[i].name) == 0)
            return &sh4_defs[i];
    return NULL;
}
void sh4_cpu_list(FILE *f, fprintf_function cpu_fprintf)
{
    int i;

    for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
        (*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
}
static void cpu_register(CPUSH4State *env, const sh4_def_t *def)
CPUSH4State *cpu_sh4_init(const char *cpu_model)
{
    CPUSH4State *env;
    const sh4_def_t *def;

    def = cpu_sh4_find_by_name(cpu_model);
    if (!def)
        return NULL;
    env = g_malloc0(sizeof(CPUSH4State));
    env->features = def->features;
    env->movcal_backup_tail = &(env->movcal_backup);
    sh4_translate_init();
    env->cpu_model_str = cpu_model;
    cpu_state_reset(env);
    cpu_register(env, def);
    return env;
}
static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        !ctx->singlestep_enabled) {
        /* Use a direct jump if in same page and singlestep not enabled */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(cpu_pc, dest);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
    } else {
        tcg_gen_movi_i32(cpu_pc, dest);
        if (ctx->singlestep_enabled)
            gen_helper_debug();
        tcg_gen_exit_tb(0);
    }
}
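
/* gen_goto_tb chains directly to the target TB only when the target lies in
   the same guest page and single-stepping is off; otherwise it writes cpu_pc
   and leaves through the generic exit path. */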
static void gen_jump(DisasContext * ctx)
{
    if (ctx->delayed_pc == (uint32_t) - 1) {
        /* Target is not statically known, it comes necessarily from a
           delayed jump, as immediate jumps are conditional jumps */
        tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
        if (ctx->singlestep_enabled)
            gen_helper_debug();
        tcg_gen_exit_tb(0);
    } else {
        gen_goto_tb(ctx, 0, ctx->delayed_pc);
    }
}
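
/* gen_branch_slot records the target of a bt/s or bf/s before its delay slot
   executes: it stores the target into cpu_delayed_pc and, when SR.T matches
   the requested value (i.e. the branch will be taken), sets the
   DELAY_SLOT_TRUE flag that gen_delayed_conditional_jump tests afterwards. */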
static inline void gen_branch_slot(uint32_t delayed_pc, int t)
{
    TCGv sr;
    int label = gen_new_label();

    tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
    tcg_gen_brcondi_i32(t ? TCG_COND_EQ : TCG_COND_NE, sr, 0, label);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    gen_set_label(label);
}
/* Immediate conditional jump (bt or bf) */
static void gen_conditional_jump(DisasContext * ctx,
                                 target_ulong ift, target_ulong ifnott)
{
    int l1;
    TCGv sr;

    l1 = gen_new_label();
    sr = tcg_temp_new();
    tcg_gen_andi_i32(sr, cpu_sr, SR_T);
    tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
    gen_goto_tb(ctx, 0, ifnott);
    gen_set_label(l1);
    gen_goto_tb(ctx, 1, ift);
}
/* Delayed conditional jump (bt or bf) */
static void gen_delayed_conditional_jump(DisasContext * ctx)
{
    int l1;
    TCGv ds;

    l1 = gen_new_label();
    ds = tcg_temp_new();
    tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
    gen_goto_tb(ctx, 1, ctx->pc + 2);
    gen_set_label(l1);
    tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
    gen_jump(ctx);
}
static inline void gen_set_t(void)
{
    tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
}

static inline void gen_clr_t(void)
{
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
}
static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
{
    TCGv t;

    t = tcg_temp_new();
    tcg_gen_setcond_i32(cond, t, t1, t0);
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
    tcg_gen_or_i32(cpu_sr, cpu_sr, t);
    tcg_temp_free(t);
}

static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
{
    TCGv t;

    t = tcg_temp_new();
    tcg_gen_setcondi_i32(cond, t, t0, imm);
    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
    tcg_gen_or_i32(cpu_sr, cpu_sr, t);
    tcg_temp_free(t);
}
static inline void gen_store_flags(uint32_t flags)
{
    tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
    tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
}
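
/* gen_copy_bit_i32 copies bit p1 of t1 into bit p0 of t0, leaving the other
   bits of t0 untouched; it is used below to move values in and out of the
   SR.T, SR.Q and SR.M flag bits. */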
static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_andi_i32(tmp, t1, (1 << p1));
    tcg_gen_andi_i32(t0, t0, ~(1 << p0));
    if (p1 > p0)
        tcg_gen_shri_i32(tmp, tmp, p1 - p0);
    else if (p1 < p0)
        tcg_gen_shli_i32(tmp, tmp, p0 - p1);
    tcg_gen_or_i32(t0, t0, tmp);

    tcg_temp_free(tmp);
}
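
/* A double-precision DRn value is kept as a pair of 32-bit FP registers:
   cpu_fregs[reg] holds the high word and cpu_fregs[reg + 1] the low word,
   which is why the pair is assembled and split with concat/shift below. */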
static inline void gen_load_fpr64(TCGv_i64 t, int reg)
{
    tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
}

static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(tmp, t);
    tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
    tcg_gen_shri_i64(t, t, 32);
    tcg_gen_trunc_i64_i32(tmp, t);
    tcg_gen_mov_i32(cpu_fregs[reg], tmp);
    tcg_temp_free_i32(tmp);
}
#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
                (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
                ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
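
/* REG() picks the active general-register bank (R0..R7 exist in two banks,
   selected by SR.MD and SR.RB), while ALTREG() picks the inactive bank for
   ldc/stc Rm_BANK.  FREG/XREG/DREG likewise select between the two
   floating-point register banks according to FPSCR.FR. */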
#define CHECK_NOT_DELAY_SLOT \
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {     \
      gen_helper_raise_slot_illegal_instruction();              \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }

#define CHECK_PRIVILEGED                                        \
  if (IS_USER(ctx)) {                                           \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          gen_helper_raise_slot_illegal_instruction();          \
      } else {                                                  \
          gen_helper_raise_illegal_instruction();               \
      }                                                         \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }

#define CHECK_FPU_ENABLED                                       \
  if (ctx->flags & SR_FD) {                                     \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          gen_helper_raise_slot_fpu_disable();                  \
      } else {                                                  \
          gen_helper_raise_fpu_disable();                       \
      }                                                         \
      ctx->bstate = BS_EXCP;                                    \
      return;                                                   \
  }
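
/* The CHECK_* macros above abort translation of the current instruction:
   they emit the appropriate exception helper, mark the TB as ending in an
   exception (BS_EXCP) and return from _decode_opc. */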
static void _decode_opc(DisasContext * ctx)
{
    /* This code tries to make movcal emulation sufficiently
       accurate for Linux purposes.  This instruction writes
       memory, and prior to that, always allocates a cache line.
       It is used in two contexts:
       - in memcpy, where data is copied in blocks, the first write
       to a block uses movca.l for performance.
       - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
       to flush the cache. Here, the data written by movcal.l is never
       written to memory, and the data written is just bogus.

       To simulate this, we simulate movcal.l, we store the value to memory,
       but we also remember the previous content. If we see ocbi, we check
       if movcal.l for that address was done previously. If so, the write should
       not have hit the memory, so we restore the previous content.
       When we see an instruction that is neither movca.l
       nor ocbi, the previous content is discarded.

       To optimize, we only try to flush stores when we're at the start of
       TB, or if we already saw movca.l in this TB and did not flush stores
       yet. */
    if (ctx->has_movcal) {
        int opcode = ctx->opcode & 0xf0ff;
        if (opcode != 0x0093 /* ocbi */
            && opcode != 0x00c3 /* movca.l */) {
            gen_helper_discard_movcal_backup ();
            ctx->has_movcal = 0;
        }
    }

#if 0
    fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
#endif
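
    /* The decoder below matches ctx->opcode against progressively looser
       masks: exact 16-bit patterns first, then the 0xf000, 0xf00f, 0xff00,
       0xf08f and 0xf0ff groups that carry register or immediate fields. */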
    switch (ctx->opcode) {
    case 0x0019: /* div0u */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
        return;
    case 0x000b: /* rts */
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0028: /* clrmac */
        tcg_gen_movi_i32(cpu_mach, 0);
        tcg_gen_movi_i32(cpu_macl, 0);
        return;
    case 0x0048: /* clrs */
        tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
        return;
    case 0x0008: /* clrt */
        gen_clr_t();
        return;
    case 0x0038: /* ldtlb */
        CHECK_PRIVILEGED
        gen_helper_ldtlb();
        return;
    case 0x002b: /* rte */
        tcg_gen_mov_i32(cpu_sr, cpu_ssr);
        tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0058: /* sets */
        tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
        return;
    case 0x0018: /* sett */
        gen_set_t();
        return;
    case 0xfbfd: /* frchg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
        ctx->bstate = BS_STOP;
        return;
    case 0xf3fd: /* fschg */
        tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
        ctx->bstate = BS_STOP;
        return;
    case 0x0009: /* nop */
        return;
    case 0x001b: /* sleep */
        gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
        return;
    }
    switch (ctx->opcode & 0xf000) {
    case 0x1000: /* mov.l Rm,@(disp,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
        }
        return;
    case 0x5000: /* mov.l @(disp,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
        }
        return;
    case 0xe000: /* mov #imm,Rn */
        tcg_gen_movi_i32(REG(B11_8), B7_0s);
        return;
    case 0x9000: /* mov.w @(disp,PC),Rn */
        {
            TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
            tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
        }
        return;
    case 0xd000: /* mov.l @(disp,PC),Rn */
        {
            TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
        }
        return;
    case 0x7000: /* add #imm,Rn */
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
        return;
    case 0xa000: /* bra disp */
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
        ctx->flags |= DELAY_SLOT;
        return;
    case 0xb000: /* bsr disp */
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
        tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
        ctx->flags |= DELAY_SLOT;
        return;
    }
    switch (ctx->opcode & 0xf00f) {
    case 0x6003: /* mov Rm,Rn */
        tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x2000: /* mov.b Rm,@Rn */
        tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
        return;
    case 0x2001: /* mov.w Rm,@Rn */
        tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
        return;
    case 0x2002: /* mov.l Rm,@Rn */
        tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
        return;
    case 0x6000: /* mov.b @Rm,Rn */
        tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
        return;
    case 0x6001: /* mov.w @Rm,Rn */
        tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
        return;
    case 0x6002: /* mov.l @Rm,Rn */
        tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
        return;
    case 0x2004: /* mov.b Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 1);
            tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx); /* might cause re-execution */
            tcg_gen_mov_i32(REG(B11_8), addr); /* modify register status */
        }
        return;
    case 0x2005: /* mov.w Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 2);
            tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
        }
        return;
    case 0x2006: /* mov.l Rm,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
        }
        return;
    case 0x6004: /* mov.b @Rm+,Rn */
        tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
        tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
        return;
    case 0x6005: /* mov.w @Rm+,Rn */
        tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
        tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
        return;
    case 0x6006: /* mov.l @Rm+,Rn */
        tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
        tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
        return;
    case 0x0004: /* mov.b Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
        }
        return;
    case 0x0005: /* mov.w Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
        }
        return;
    case 0x0006: /* mov.l Rm,@(R0,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
        }
        return;
    case 0x000c: /* mov.b @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
        }
        return;
    case 0x000d: /* mov.w @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
        }
        return;
    case 0x000e: /* mov.l @(R0,Rm),Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
        }
        return;
    case 0x6008: /* swap.b Rm,Rn */
        {
            TCGv high, low;
            high = tcg_temp_new();
            tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
            low = tcg_temp_new();
            tcg_gen_ext16u_i32(low, REG(B7_4));
            tcg_gen_bswap16_i32(low, low);
            tcg_gen_or_i32(REG(B11_8), high, low);
        }
        return;
    case 0x6009: /* swap.w Rm,Rn */
        {
            TCGv high, low;
            high = tcg_temp_new();
            tcg_gen_shli_i32(high, REG(B7_4), 16);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B7_4), 16);
            tcg_gen_ext16u_i32(low, low);
            tcg_gen_or_i32(REG(B11_8), high, low);
        }
        return;
    case 0x200d: /* xtrct Rm,Rn */
        {
            TCGv high, low;
            high = tcg_temp_new();
            tcg_gen_shli_i32(high, REG(B7_4), 16);
            low = tcg_temp_new();
            tcg_gen_shri_i32(low, REG(B11_8), 16);
            tcg_gen_ext16u_i32(low, low);
            tcg_gen_or_i32(REG(B11_8), high, low);
        }
        return;
    case 0x300c: /* add Rm,Rn */
        tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x300e: /* addc Rm,Rn */
        gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
        return;
    case 0x300f: /* addv Rm,Rn */
        gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
        return;
    case 0x2009: /* and Rm,Rn */
        tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x3000: /* cmp/eq Rm,Rn */
        gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
        return;
    case 0x3003: /* cmp/ge Rm,Rn */
        gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
        return;
    case 0x3007: /* cmp/gt Rm,Rn */
        gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
        return;
    case 0x3006: /* cmp/hi Rm,Rn */
        gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
        return;
    case 0x3002: /* cmp/hs Rm,Rn */
        gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
        return;
    case 0x200c: /* cmp/str Rm,Rn */
        {
            TCGv cmp1 = tcg_temp_new();
            TCGv cmp2 = tcg_temp_new();
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
            tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
            tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
            tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
            tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
            tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
            tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
            tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
        }
        return;
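    /* Note: cmp/str sets SR.T when at least one of the four byte lanes of Rm
       and Rn is equal, hence the four mask/setcond/or rounds above. */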
    case 0x2007: /* div0s Rm,Rn */
        {
            gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31); /* SR_Q */
            gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);  /* SR_M */
            TCGv val = tcg_temp_new();
            tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
            gen_copy_bit_i32(cpu_sr, 0, val, 31); /* SR_T */
        }
        return;
    case 0x3004: /* div1 Rm,Rn */
        gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
        return;
    case 0x300d: /* dmuls.l Rm,Rn */
        {
            TCGv_i64 tmp1 = tcg_temp_new_i64();
            TCGv_i64 tmp2 = tcg_temp_new_i64();

            tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
            tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
            tcg_gen_shri_i64(tmp1, tmp1, 32);
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);

            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp1);
        }
        return;
    case 0x3005: /* dmulu.l Rm,Rn */
        {
            TCGv_i64 tmp1 = tcg_temp_new_i64();
            TCGv_i64 tmp2 = tcg_temp_new_i64();

            tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
            tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
            tcg_gen_mul_i64(tmp1, tmp1, tmp2);
            tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
            tcg_gen_shri_i64(tmp1, tmp1, 32);
            tcg_gen_trunc_i64_i32(cpu_mach, tmp1);

            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp1);
        }
        return;
    case 0x600e: /* exts.b Rm,Rn */
        tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600f: /* exts.w Rm,Rn */
        tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600c: /* extu.b Rm,Rn */
        tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600d: /* extu.w Rm,Rn */
        tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x000f: /* mac.l @Rm+,@Rn+ */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
            gen_helper_macl(arg0, arg1);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        }
        return;
    case 0x400f: /* mac.w @Rm+,@Rn+ */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
            arg1 = tcg_temp_new();
            tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
            gen_helper_macw(arg0, arg1);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
        }
        return;
    case 0x0007: /* mul.l Rm,Rn */
        tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
        return;
    case 0x200f: /* muls.w Rm,Rn */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16s_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
        }
        return;
    case 0x200e: /* mulu.w Rm,Rn */
        {
            TCGv arg0, arg1;
            arg0 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg0, REG(B7_4));
            arg1 = tcg_temp_new();
            tcg_gen_ext16u_i32(arg1, REG(B11_8));
            tcg_gen_mul_i32(cpu_macl, arg0, arg1);
        }
        return;
    case 0x600b: /* neg Rm,Rn */
        tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x600a: /* negc Rm,Rn */
        {
            TCGv t0, t1;
            t0 = tcg_temp_new();
            tcg_gen_neg_i32(t0, REG(B7_4));
            t1 = tcg_temp_new();
            tcg_gen_andi_i32(t1, cpu_sr, SR_T);
            tcg_gen_sub_i32(REG(B11_8), t0, t1);
            tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
            tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
            tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
            tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
        }
        return;
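    /* Note: negc computes Rn = 0 - Rm - T and leaves the borrow in SR.T,
       which is what the two unsigned compares against t0 and the new Rn
       reconstruct above. */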
    case 0x6007: /* not Rm,Rn */
        tcg_gen_not_i32(REG(B11_8), REG(B7_4));
        return;
    case 0x200b: /* or Rm,Rn */
        tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x400c: /* shad Rm,Rn */
        {
            TCGv shift;
            int label1 = gen_new_label();
            int label2 = gen_new_label();
            int label3 = gen_new_label();
            int label4 = gen_new_label();

            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
            /* Rm positive, shift to the left */
            shift = tcg_temp_new();
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
            tcg_temp_free(shift);
            tcg_gen_br(label4);
            /* Rm negative, shift to the right */
            gen_set_label(label1);
            shift = tcg_temp_new();
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
            tcg_gen_not_i32(shift, REG(B7_4));
            tcg_gen_andi_i32(shift, shift, 0x1f);
            tcg_gen_addi_i32(shift, shift, 1);
            tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
            tcg_temp_free(shift);
            tcg_gen_br(label4);
            gen_set_label(label2);
            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
            tcg_gen_movi_i32(REG(B11_8), 0);
            tcg_gen_br(label4);
            gen_set_label(label3);
            tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
            gen_set_label(label4);
        }
        return;
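    /* Note: for shad/shld a positive Rm shifts Rn left by Rm & 0x1f and a
       negative Rm shifts it right by (~Rm & 0x1f) + 1 (arithmetic for shad,
       logical for shld); a negative Rm whose low five bits are zero is
       handled separately as a full 32-bit shift. */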
    case 0x400d: /* shld Rm,Rn */
        {
            TCGv shift;
            int label1 = gen_new_label();
            int label2 = gen_new_label();
            int label3 = gen_new_label();

            tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
            /* Rm positive, shift to the left */
            shift = tcg_temp_new();
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
            tcg_temp_free(shift);
            tcg_gen_br(label3);
            /* Rm negative, shift to the right */
            gen_set_label(label1);
            shift = tcg_temp_new();
            tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
            tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
            tcg_gen_not_i32(shift, REG(B7_4));
            tcg_gen_andi_i32(shift, shift, 0x1f);
            tcg_gen_addi_i32(shift, shift, 1);
            tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
            tcg_temp_free(shift);
            tcg_gen_br(label3);
            gen_set_label(label2);
            tcg_gen_movi_i32(REG(B11_8), 0);
            gen_set_label(label3);
        }
        return;
    case 0x3008: /* sub Rm,Rn */
        tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0x300a: /* subc Rm,Rn */
        gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
        return;
    case 0x300b: /* subv Rm,Rn */
        gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
        return;
    case 0x2008: /* tst Rm,Rn */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
            gen_cmp_imm(TCG_COND_EQ, val, 0);
        }
        return;
    case 0x200a: /* xor Rm,Rn */
        tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
        return;
    case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, XREG(B7_4));
            gen_store_fpr64(fp, XREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
        }
        return;
    case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B7_4);
            tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_fregs[fr], REG(B11_8), ctx->memidx);
            tcg_gen_qemu_st32(cpu_fregs[fr + 1], addr_hi, ctx->memidx);
            tcg_temp_free(addr_hi);
        } else {
            tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
        }
        return;
    case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B11_8);
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
            tcg_gen_qemu_ld32u(cpu_fregs[fr], REG(B7_4), ctx->memidx);
            tcg_gen_qemu_ld32u(cpu_fregs[fr + 1], addr_hi, ctx->memidx);
            tcg_temp_free(addr_hi);
        } else {
            tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
        }
        return;
    case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr_hi = tcg_temp_new();
            int fr = XREG(B11_8);
            tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
            tcg_gen_qemu_ld32u(cpu_fregs[fr], REG(B7_4), ctx->memidx);
            tcg_gen_qemu_ld32u(cpu_fregs[fr + 1], addr_hi, ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
            tcg_temp_free(addr_hi);
        } else {
            tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
            tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
        }
        return;
    case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
        if (ctx->fpscr & FPSCR_SZ) {
            TCGv addr = tcg_temp_new_i32();
            int fr = XREG(B7_4);
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_fregs[fr + 1], addr, ctx->memidx);
            tcg_gen_subi_i32(addr, addr, 4);
            tcg_gen_qemu_st32(cpu_fregs[fr], addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        } else {
            TCGv addr;
            addr = tcg_temp_new_i32();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
        {
            TCGv addr = tcg_temp_new_i32();
            tcg_gen_add_i32(addr, REG(B7_4), REG(0));
            if (ctx->fpscr & FPSCR_SZ) {
                int fr = XREG(B11_8);
                tcg_gen_qemu_ld32u(cpu_fregs[fr], addr, ctx->memidx);
                tcg_gen_addi_i32(addr, addr, 4);
                tcg_gen_qemu_ld32u(cpu_fregs[fr + 1], addr, ctx->memidx);
            } else {
                tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
            }
            tcg_temp_free(addr);
        }
        return;
    case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(B11_8), REG(0));
            if (ctx->fpscr & FPSCR_SZ) {
                int fr = XREG(B7_4);
                tcg_gen_qemu_st32(cpu_fregs[fr], addr, ctx->memidx);
                tcg_gen_addi_i32(addr, addr, 4);
                tcg_gen_qemu_st32(cpu_fregs[fr + 1], addr, ctx->memidx);
            } else {
                tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
            }
            tcg_temp_free(addr);
        }
        return;
    case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
    case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
    case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            TCGv_i64 fp0, fp1;

            if (ctx->opcode & 0x0110)
                break; /* illegal instruction */
            fp0 = tcg_temp_new_i64();
            fp1 = tcg_temp_new_i64();
            gen_load_fpr64(fp0, DREG(B11_8));
            gen_load_fpr64(fp1, DREG(B7_4));
            switch (ctx->opcode & 0xf00f) {
            case 0xf000: /* fadd Rm,Rn */
                gen_helper_fadd_DT(fp0, fp0, fp1);
                break;
            case 0xf001: /* fsub Rm,Rn */
                gen_helper_fsub_DT(fp0, fp0, fp1);
                break;
            case 0xf002: /* fmul Rm,Rn */
                gen_helper_fmul_DT(fp0, fp0, fp1);
                break;
            case 0xf003: /* fdiv Rm,Rn */
                gen_helper_fdiv_DT(fp0, fp0, fp1);
                break;
            case 0xf004: /* fcmp/eq Rm,Rn */
                gen_helper_fcmp_eq_DT(fp0, fp1);
                break;
            case 0xf005: /* fcmp/gt Rm,Rn */
                gen_helper_fcmp_gt_DT(fp0, fp1);
                break;
            }
            gen_store_fpr64(fp0, DREG(B11_8));
            tcg_temp_free_i64(fp0);
            tcg_temp_free_i64(fp1);
        } else {
            switch (ctx->opcode & 0xf00f) {
            case 0xf000: /* fadd Rm,Rn */
                gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B7_4)]);
                break;
            case 0xf001: /* fsub Rm,Rn */
                gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B7_4)]);
                break;
            case 0xf002: /* fmul Rm,Rn */
                gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B7_4)]);
                break;
            case 0xf003: /* fdiv Rm,Rn */
                gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B11_8)],
                                   cpu_fregs[FREG(B7_4)]);
                break;
            case 0xf004: /* fcmp/eq Rm,Rn */
                gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)],
                                      cpu_fregs[FREG(B7_4)]);
                break;
            case 0xf005: /* fcmp/gt Rm,Rn */
                gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)],
                                      cpu_fregs[FREG(B7_4)]);
                break;
            }
        }
        return;
    case 0xf00e: /* fmac FR0,RM,Rn */
        if (ctx->fpscr & FPSCR_PR) {
            break; /* illegal instruction */
        }
        gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
                           cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)],
                           cpu_fregs[FREG(B11_8)]);
        return;
    }
    switch (ctx->opcode & 0xff00) {
    case 0xc900: /* and #imm,R0 */
        tcg_gen_andi_i32(REG(0), REG(0), B7_0);
        return;
    case 0xcd00: /* and.b #imm,@(R0,GBR) */
        {
            TCGv addr, val;
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_andi_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8b00: /* bf label */
        CHECK_NOT_DELAY_SLOT
        gen_conditional_jump(ctx, ctx->pc + 2,
                             ctx->pc + 4 + B7_0s * 2);
        ctx->bstate = BS_BRANCH;
        return;
    case 0x8f00: /* bf/s label */
        CHECK_NOT_DELAY_SLOT
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
        return;
    case 0x8900: /* bt label */
        CHECK_NOT_DELAY_SLOT
        gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
                             ctx->pc + 2);
        ctx->bstate = BS_BRANCH;
        return;
    case 0x8d00: /* bt/s label */
        CHECK_NOT_DELAY_SLOT
        gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
        ctx->flags |= DELAY_SLOT_CONDITIONAL;
        return;
    case 0x8800: /* cmp/eq #imm,R0 */
        gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
        return;
    case 0xc400: /* mov.b @(disp,GBR),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
            tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc500: /* mov.w @(disp,GBR),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
            tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc600: /* mov.l @(disp,GBR),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
            tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc000: /* mov.b R0,@(disp,GBR) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
            tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc100: /* mov.w R0,@(disp,GBR) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
            tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc200: /* mov.l R0,@(disp,GBR) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
            tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8000: /* mov.b R0,@(disp,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
            tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8100: /* mov.w R0,@(disp,Rn) */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
            tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8400: /* mov.b @(disp,Rn),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
            tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0x8500: /* mov.w @(disp,Rn),R0 */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
            tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc700: /* mova @(disp,PC),R0 */
        tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
        return;
    case 0xcb00: /* or #imm,R0 */
        tcg_gen_ori_i32(REG(0), REG(0), B7_0);
        return;
    case 0xcf00: /* or.b #imm,@(R0,GBR) */
        {
            TCGv addr, val;
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_ori_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xc300: /* trapa #imm */
        {
            TCGv imm;
            CHECK_NOT_DELAY_SLOT
            imm = tcg_const_i32(B7_0);
            gen_helper_trapa(imm);
            ctx->bstate = BS_BRANCH;
        }
        return;
    case 0xc800: /* tst #imm,R0 */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_andi_i32(val, REG(0), B7_0);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
        }
        return;
    case 0xcc00: /* tst.b #imm,@(R0,GBR) */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_add_i32(val, REG(0), cpu_gbr);
            tcg_gen_qemu_ld8u(val, val, ctx->memidx);
            tcg_gen_andi_i32(val, val, B7_0);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
        }
        return;
    case 0xca00: /* xor #imm,R0 */
        tcg_gen_xori_i32(REG(0), REG(0), B7_0);
        return;
    case 0xce00: /* xor.b #imm,@(R0,GBR) */
        {
            TCGv addr, val;
            addr = tcg_temp_new();
            tcg_gen_add_i32(addr, REG(0), cpu_gbr);
            val = tcg_temp_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            tcg_gen_xori_i32(val, val, B7_0);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    }
    switch (ctx->opcode & 0xf08f) {
    case 0x408e: /* ldc Rm,Rn_BANK */
        tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
        return;
    case 0x4087: /* ldc.l @Rm+,Rn_BANK */
        tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        return;
    case 0x0082: /* stc Rm_BANK,Rn */
        tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
        return;
    case 0x4083: /* stc.l Rm_BANK,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    }
    switch (ctx->opcode & 0xf0ff) {
    case 0x0023: /* braf Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x0003: /* bsrf Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x4015: /* cmp/pl Rn */
        gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
        return;
    case 0x4011: /* cmp/pz Rn */
        gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
        return;
    case 0x4010: /* dt Rn */
        tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
        gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
        return;
    case 0x402b: /* jmp @Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x400b: /* jsr @Rn */
        CHECK_NOT_DELAY_SLOT
        tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
        tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
        ctx->flags |= DELAY_SLOT;
        ctx->delayed_pc = (uint32_t) - 1;
        return;
    case 0x400e: /* ldc Rm,SR */
        tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
        ctx->bstate = BS_STOP;
        return;
    case 0x4007: /* ldc.l @Rm+,SR */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
            tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
        }
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        ctx->bstate = BS_STOP;
        return;
    case 0x0002: /* stc SR,Rn */
        tcg_gen_mov_i32(REG(B11_8), cpu_sr);
        return;
    case 0x4003: /* stc SR,@-Rn */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
#define LD(reg,ldnum,ldpnum,prechk)                              \
  case ldnum:                                                    \
    prechk                                                       \
    tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));                     \
    return;                                                      \
  case ldpnum:                                                   \
    prechk                                                       \
    tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);     \
    tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);                 \
    return;
#define ST(reg,stnum,stpnum,prechk)                              \
  case stnum:                                                    \
    prechk                                                       \
    tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);                     \
    return;                                                      \
  case stpnum:                                                   \
    prechk                                                       \
    {                                                            \
        TCGv addr = tcg_temp_new();                              \
        tcg_gen_subi_i32(addr, REG(B11_8), 4);                   \
        tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);        \
        tcg_gen_mov_i32(REG(B11_8), addr);                       \
        tcg_temp_free(addr);                                     \
    }                                                            \
    return;
#define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)               \
        LD(reg,ldnum,ldpnum,prechk)                              \
        ST(reg,stnum,stpnum,prechk)
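    /* The LD/ST/LDST macros above expand, inside this switch, into the
       register-to-register, post-increment load and pre-decrement store
       forms of ldc/lds and stc/sts for the named control or system
       register. */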
    LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
    LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
    LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
    LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
    ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
    LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
    LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
    LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
    LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
    LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
    LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
    case 0x406a: /* lds Rm,FPSCR */
        gen_helper_ld_fpscr(REG(B11_8));
        ctx->bstate = BS_STOP;
        return;
    case 0x4066: /* lds.l @Rm+,FPSCR */
        {
            TCGv addr = tcg_temp_new();
            tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
            tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
            gen_helper_ld_fpscr(addr);
            tcg_temp_free(addr);
            ctx->bstate = BS_STOP;
        }
        return;
    case 0x006a: /* sts FPSCR,Rn */
        tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
        return;
    case 0x4062: /* sts FPSCR,@-Rn */
        {
            TCGv addr, val;
            val = tcg_temp_new();
            tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
            addr = tcg_temp_new();
            tcg_gen_subi_i32(addr, REG(B11_8), 4);
            tcg_gen_qemu_st32(val, addr, ctx->memidx);
            tcg_gen_mov_i32(REG(B11_8), addr);
            tcg_temp_free(addr);
        }
        return;
    case 0x00c3: /* movca.l R0,@Rm */
        {
            TCGv val = tcg_temp_new();
            tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
            gen_helper_movcal (REG(B11_8), val);
            tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
        }
        ctx->has_movcal = 1;
        return;
    case 0x40a9:
        /* MOVUA.L @Rm,R0   (Rm) -> R0
           Load non-boundary-aligned data */
        tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
        return;
    case 0x40e9:
        /* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
           Load non-boundary-aligned data */
        tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
        tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
        return;
    case 0x0029: /* movt Rn */
        tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
        return;
    case 0x0073:
        /* MOVCO.L
               LDST -> T
               If (T == 1) R0 -> (Rn)
               0 -> LDST
        */
        if (ctx->features & SH_FEATURE_SH4A) {
            int label = gen_new_label();
            gen_clr_t();
            tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
            tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
            tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
            gen_set_label(label);
            tcg_gen_movi_i32(cpu_ldst, 0);
            return;
        } else
            break;
    case 0x0063:
        /* MOVLI.L @Rm,R0
               1 -> LDST
               (Rm) -> R0
               When interrupt/exception
               occurred, 0 -> LDST
        */
        if (ctx->features & SH_FEATURE_SH4A) {
            tcg_gen_movi_i32(cpu_ldst, 0);
            tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
            tcg_gen_movi_i32(cpu_ldst, 1);
            return;
        } else
            break;
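    /* Note: movli.l/movco.l model the SH4A LDST link flag with cpu_ldst:
       movli.l sets it after the load, and movco.l copies it into SR.T,
       performs the store only while it is still set, then clears it. */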
    case 0x0093: /* ocbi @Rn */
        gen_helper_ocbi (REG(B11_8));
        return;
    case 0x00a3: /* ocbp @Rn */
    case 0x00b3: /* ocbwb @Rn */
        /* These instructions are supposed to do nothing in case of
           a cache miss. Given that we only partially emulate caches
           it is safe to simply ignore them. */
        return;
    case 0x0083: /* pref @Rn */
        return;
    case 0x00d3: /* prefi @Rn */
        if (ctx->features & SH_FEATURE_SH4A)
            return;
        else
            break;
    case 0x00e3: /* icbi @Rn */
        if (ctx->features & SH_FEATURE_SH4A)
            return;
        else
            break;
    case 0x00ab: /* synco */
        if (ctx->features & SH_FEATURE_SH4A)
            return;
        else
            break;
    case 0x4024: /* rotcl Rn */
        {
            TCGv tmp = tcg_temp_new();
            tcg_gen_mov_i32(tmp, cpu_sr);
            gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
            tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
            gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
        }
        return;
    case 0x4025: /* rotcr Rn */
        {
            TCGv tmp = tcg_temp_new();
            tcg_gen_mov_i32(tmp, cpu_sr);
            gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
            tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
            gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
        }
        return;
    case 0x4004: /* rotl Rn */
        tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        return;
    case 0x4005: /* rotr Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4000: /* shll Rn */
    case 0x4020: /* shal Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4021: /* shar Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4001: /* shlr Rn */
        gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
        return;
    case 0x4008: /* shll2 Rn */
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
        return;
    case 0x4018: /* shll8 Rn */
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
        return;
    case 0x4028: /* shll16 Rn */
        tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
        return;
    case 0x4009: /* shlr2 Rn */
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
        return;
    case 0x4019: /* shlr8 Rn */
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
        return;
    case 0x4029: /* shlr16 Rn */
        tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
        return;
    case 0x401b: /* tas.b @Rn */
        {
            TCGv addr, val;
            addr = tcg_temp_local_new();
            tcg_gen_mov_i32(addr, REG(B11_8));
            val = tcg_temp_local_new();
            tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
            gen_cmp_imm(TCG_COND_EQ, val, 0);
            tcg_gen_ori_i32(val, val, 0x80);
            tcg_gen_qemu_st8(val, addr, ctx->memidx);
            tcg_temp_free(addr);
        }
        return;
    case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
        tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
        return;
    case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
        tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
        return;
    case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            TCGv_i64 fp;
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            fp = tcg_temp_new_i64();
            gen_helper_float_DT(fp, cpu_fpul);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_fpul);
        }
        return;
    case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
        if (ctx->fpscr & FPSCR_PR) {
            TCGv_i64 fp;
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_ftrc_DT(cpu_fpul, fp);
            tcg_temp_free_i64(fp);
        } else {
            gen_helper_ftrc_FT(cpu_fpul, cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
        gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
        return;
    case 0xf05d: /* fabs FRn/DRn */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fabs_DT(fp, fp);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf06d: /* fsqrt FRn */
        if (ctx->fpscr & FPSCR_PR) {
            if (ctx->opcode & 0x0100)
                break; /* illegal instruction */
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fsqrt_DT(fp, fp);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        } else {
            gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
        }
        return;
    case 0xf07d: /* fsrra FRn */
        break;
    case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
            return;
        }
        break;
    case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
        if (!(ctx->fpscr & FPSCR_PR)) {
            tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
            return;
        }
        break;
    case 0xf0ad: /* fcnvsd FPUL,DRn */
        {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
            gen_store_fpr64(fp, DREG(B11_8));
            tcg_temp_free_i64(fp);
        }
        return;
    case 0xf0bd: /* fcnvds DRn,FPUL */
        {
            TCGv_i64 fp = tcg_temp_new_i64();
            gen_load_fpr64(fp, DREG(B11_8));
            gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
            tcg_temp_free_i64(fp);
        }
        return;
    case 0xf0ed: /* fipr FVm,FVn */
        if ((ctx->fpscr & FPSCR_PR) == 0) {
            TCGv m, n;
            m = tcg_const_i32((ctx->opcode >> 8) & 3);
            n = tcg_const_i32((ctx->opcode >> 10) & 3);
            gen_helper_fipr(m, n);
            return;
        }
        break;
    case 0xf0fd: /* ftrv XMTRX,FVn */
        if ((ctx->opcode & 0x0300) == 0x0100 &&
            (ctx->fpscr & FPSCR_PR) == 0) {
            TCGv n;
            n = tcg_const_i32((ctx->opcode >> 10) & 3);
            gen_helper_ftrv(n);
            return;
        }
        break;
    }

    fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
            ctx->opcode, ctx->pc);
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        gen_helper_raise_slot_illegal_instruction();
    } else {
        gen_helper_raise_illegal_instruction();
    }
    ctx->bstate = BS_EXCP;
}
static void decode_opc(DisasContext * ctx)
{
    uint32_t old_flags = ctx->flags;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
        tcg_gen_debug_insn_start(ctx->pc);
    }

    _decode_opc(ctx);

    if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        if (ctx->flags & DELAY_SLOT_CLEARME) {
            gen_store_flags(0);
        } else {
            /* go out of the delay slot */
            uint32_t new_flags = ctx->flags;
            new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
            gen_store_flags(new_flags);
        }
        ctx->flags = 0;
        ctx->bstate = BS_BRANCH;
        if (old_flags & DELAY_SLOT_CONDITIONAL) {
            gen_delayed_conditional_jump(ctx);
        } else if (old_flags & DELAY_SLOT) {
            gen_jump(ctx);
        }
    }

    /* go into a delay slot */
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
        gen_store_flags(ctx->flags);
}
static inline void
gen_intermediate_code_internal(CPUSH4State * env, TranslationBlock * tb,
                               int search_pc)
{
    DisasContext ctx;
    target_ulong pc_start;
    static uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int i, ii;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.flags = (uint32_t)tb->flags;
    ctx.bstate = BS_NONE;
    ctx.sr = env->sr;
    ctx.fpscr = env->fpscr;
    ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx.delayed_pc = -1; /* use delayed pc from env pointer */
    ctx.tb = tb;
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.features = env->features;
    ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);

    ii = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (ctx.pc == bp->pc) {
                    /* We have hit a breakpoint - make sure PC is up-to-date */
                    tcg_gen_movi_i32(cpu_pc, ctx.pc);
                    gen_helper_debug();
                    ctx.bstate = BS_EXCP;
                    break;
                }
            }
        }
        if (search_pc) {
            i = gen_opc_ptr - gen_opc_buf;
            if (ii < i) {
                ii++;
                while (ii < i)
                    gen_opc_instr_start[ii++] = 0;
            }
            gen_opc_pc[ii] = ctx.pc;
            gen_opc_hflags[ii] = ctx.flags;
            gen_opc_instr_start[ii] = 1;
            gen_opc_icount[ii] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if 0
        fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
#endif
        ctx.opcode = lduw_code(ctx.pc);
        decode_opc(&ctx);
        num_insns++;
        ctx.pc += 2;
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (env->singlestep_enabled)
            break;
        if (num_insns >= max_insns)
            break;
#ifdef SH4_SINGLE_STEP
        break;
#endif
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (env->singlestep_enabled) {
        tcg_gen_movi_i32(cpu_pc, ctx.pc);
        gen_helper_debug();
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            /* gen_op_interrupt_restart(); */
            /* fall through */
        case BS_NONE:
            if (ctx.flags) {
                gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
            }
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            /* gen_op_interrupt_restart(); */
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            break;
        }
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        i = gen_opc_ptr - gen_opc_buf;
        ii++;
        while (ii <= i)
            gen_opc_instr_start[ii++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef SH4_DEBUG_DISAS
    qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
#endif
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
    }
}
void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
{
    env->pc = gen_opc_pc[pc_pos];
    env->flags = gen_opc_hflags[pc_pos];
}