4 * Copyright (c) 2005 Samuel Tardieu
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
26 #define SH4_DEBUG_DISAS
27 //#define SH4_SINGLE_STEP
33 #include "qemu-common.h"
39 typedef struct DisasContext
{
40 struct TranslationBlock
*tb
;
49 int singlestep_enabled
;
54 #if defined(CONFIG_USER_ONLY)
55 #define IS_USER(ctx) 1
57 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
61 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
64 BS_STOP
= 1, /* We want to stop translation for any reason */
65 BS_BRANCH
= 2, /* We reached a branch condition */
66 BS_EXCP
= 3, /* We reached an exception condition */
69 /* global register indexes */
70 static TCGv_ptr cpu_env
;
71 static TCGv cpu_gregs
[24];
72 static TCGv cpu_pc
, cpu_sr
, cpu_ssr
, cpu_spc
, cpu_gbr
;
73 static TCGv cpu_vbr
, cpu_sgr
, cpu_dbr
, cpu_mach
, cpu_macl
;
74 static TCGv cpu_pr
, cpu_fpscr
, cpu_fpul
, cpu_ldst
;
75 static TCGv cpu_fregs
[32];
77 /* internal register indexes */
78 static TCGv cpu_flags
, cpu_delayed_pc
;
80 static uint32_t gen_opc_hflags
[OPC_BUF_SIZE
];
82 #include "gen-icount.h"
84 static void sh4_translate_init(void)
87 static int done_init
= 0;
88 static const char * const gregnames
[24] = {
89 "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
90 "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
91 "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
92 "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
93 "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
95 static const char * const fregnames
[32] = {
96 "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
97 "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
98 "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
99 "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
100 "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
101 "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
102 "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
103 "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
109 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
111 for (i
= 0; i
< 24; i
++)
112 cpu_gregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
113 offsetof(CPUState
, gregs
[i
]),
116 cpu_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
117 offsetof(CPUState
, pc
), "PC");
118 cpu_sr
= tcg_global_mem_new_i32(TCG_AREG0
,
119 offsetof(CPUState
, sr
), "SR");
120 cpu_ssr
= tcg_global_mem_new_i32(TCG_AREG0
,
121 offsetof(CPUState
, ssr
), "SSR");
122 cpu_spc
= tcg_global_mem_new_i32(TCG_AREG0
,
123 offsetof(CPUState
, spc
), "SPC");
124 cpu_gbr
= tcg_global_mem_new_i32(TCG_AREG0
,
125 offsetof(CPUState
, gbr
), "GBR");
126 cpu_vbr
= tcg_global_mem_new_i32(TCG_AREG0
,
127 offsetof(CPUState
, vbr
), "VBR");
128 cpu_sgr
= tcg_global_mem_new_i32(TCG_AREG0
,
129 offsetof(CPUState
, sgr
), "SGR");
130 cpu_dbr
= tcg_global_mem_new_i32(TCG_AREG0
,
131 offsetof(CPUState
, dbr
), "DBR");
132 cpu_mach
= tcg_global_mem_new_i32(TCG_AREG0
,
133 offsetof(CPUState
, mach
), "MACH");
134 cpu_macl
= tcg_global_mem_new_i32(TCG_AREG0
,
135 offsetof(CPUState
, macl
), "MACL");
136 cpu_pr
= tcg_global_mem_new_i32(TCG_AREG0
,
137 offsetof(CPUState
, pr
), "PR");
138 cpu_fpscr
= tcg_global_mem_new_i32(TCG_AREG0
,
139 offsetof(CPUState
, fpscr
), "FPSCR");
140 cpu_fpul
= tcg_global_mem_new_i32(TCG_AREG0
,
141 offsetof(CPUState
, fpul
), "FPUL");
143 cpu_flags
= tcg_global_mem_new_i32(TCG_AREG0
,
144 offsetof(CPUState
, flags
), "_flags_");
145 cpu_delayed_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
146 offsetof(CPUState
, delayed_pc
),
148 cpu_ldst
= tcg_global_mem_new_i32(TCG_AREG0
,
149 offsetof(CPUState
, ldst
), "_ldst_");
151 for (i
= 0; i
< 32; i
++)
152 cpu_fregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
153 offsetof(CPUState
, fregs
[i
]),
156 /* register helpers */
163 void cpu_dump_state(CPUState
* env
, FILE * f
,
164 int (*cpu_fprintf
) (FILE * f
, const char *fmt
, ...),
168 cpu_fprintf(f
, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
169 env
->pc
, env
->sr
, env
->pr
, env
->fpscr
);
170 cpu_fprintf(f
, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
171 env
->spc
, env
->ssr
, env
->gbr
, env
->vbr
);
172 cpu_fprintf(f
, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
173 env
->sgr
, env
->dbr
, env
->delayed_pc
, env
->fpul
);
174 for (i
= 0; i
< 24; i
+= 4) {
175 cpu_fprintf(f
, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
176 i
, env
->gregs
[i
], i
+ 1, env
->gregs
[i
+ 1],
177 i
+ 2, env
->gregs
[i
+ 2], i
+ 3, env
->gregs
[i
+ 3]);
179 if (env
->flags
& DELAY_SLOT
) {
180 cpu_fprintf(f
, "in delay slot (delayed_pc=0x%08x)\n",
182 } else if (env
->flags
& DELAY_SLOT_CONDITIONAL
) {
183 cpu_fprintf(f
, "in conditional delay slot (delayed_pc=0x%08x)\n",
188 static void cpu_sh4_reset(CPUSH4State
* env
)
190 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
191 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
192 log_cpu_state(env
, 0);
195 #if defined(CONFIG_USER_ONLY)
198 env
->sr
= SR_MD
| SR_RB
| SR_BL
| SR_I3
| SR_I2
| SR_I1
| SR_I0
;
201 env
->pc
= 0xA0000000;
202 #if defined(CONFIG_USER_ONLY)
203 env
->fpscr
= FPSCR_PR
; /* value for userspace according to the kernel */
204 set_float_rounding_mode(float_round_nearest_even
, &env
->fp_status
); /* ?! */
206 env
->fpscr
= FPSCR_DN
| FPSCR_RM_ZERO
; /* CPU reset value according to SH4 manual */
207 set_float_rounding_mode(float_round_to_zero
, &env
->fp_status
);
208 set_flush_to_zero(1, &env
->fp_status
);
210 set_default_nan_mode(1, &env
->fp_status
);
223 static sh4_def_t sh4_defs
[] = {
226 .id
= SH_CPU_SH7750R
,
230 .features
= SH_FEATURE_BCR3_AND_BCR4
,
233 .id
= SH_CPU_SH7751R
,
236 .cvr
= 0x00110000, /* Neutered caches, should be 0x20480000 */
237 .features
= SH_FEATURE_BCR3_AND_BCR4
,
244 .features
= SH_FEATURE_SH4A
,
248 static const sh4_def_t
*cpu_sh4_find_by_name(const char *name
)
252 if (strcasecmp(name
, "any") == 0)
255 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
256 if (strcasecmp(name
, sh4_defs
[i
].name
) == 0)
262 void sh4_cpu_list(FILE *f
, fprintf_function cpu_fprintf
)
266 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
267 (*cpu_fprintf
)(f
, "%s\n", sh4_defs
[i
].name
);
270 static void cpu_sh4_register(CPUSH4State
*env
, const sh4_def_t
*def
)
278 CPUSH4State
*cpu_sh4_init(const char *cpu_model
)
281 const sh4_def_t
*def
;
283 def
= cpu_sh4_find_by_name(cpu_model
);
286 env
= qemu_mallocz(sizeof(CPUSH4State
));
287 env
->features
= def
->features
;
289 env
->movcal_backup_tail
= &(env
->movcal_backup
);
290 sh4_translate_init();
291 env
->cpu_model_str
= cpu_model
;
293 cpu_sh4_register(env
, def
);
299 static void gen_goto_tb(DisasContext
* ctx
, int n
, target_ulong dest
)
301 TranslationBlock
*tb
;
304 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
305 !ctx
->singlestep_enabled
) {
306 /* Use a direct jump if in same page and singlestep not enabled */
308 tcg_gen_movi_i32(cpu_pc
, dest
);
309 tcg_gen_exit_tb((long) tb
+ n
);
311 tcg_gen_movi_i32(cpu_pc
, dest
);
312 if (ctx
->singlestep_enabled
)
318 static void gen_jump(DisasContext
* ctx
)
320 if (ctx
->delayed_pc
== (uint32_t) - 1) {
321 /* Target is not statically known, it comes necessarily from a
322 delayed jump as immediate jump are conditinal jumps */
323 tcg_gen_mov_i32(cpu_pc
, cpu_delayed_pc
);
324 if (ctx
->singlestep_enabled
)
328 gen_goto_tb(ctx
, 0, ctx
->delayed_pc
);
332 static inline void gen_branch_slot(uint32_t delayed_pc
, int t
)
335 int label
= gen_new_label();
336 tcg_gen_movi_i32(cpu_delayed_pc
, delayed_pc
);
338 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
339 tcg_gen_brcondi_i32(TCG_COND_NE
, sr
, t
? SR_T
: 0, label
);
340 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
341 gen_set_label(label
);
344 /* Immediate conditional jump (bt or bf) */
345 static void gen_conditional_jump(DisasContext
* ctx
,
346 target_ulong ift
, target_ulong ifnott
)
351 l1
= gen_new_label();
353 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
354 tcg_gen_brcondi_i32(TCG_COND_EQ
, sr
, SR_T
, l1
);
355 gen_goto_tb(ctx
, 0, ifnott
);
357 gen_goto_tb(ctx
, 1, ift
);
360 /* Delayed conditional jump (bt or bf) */
361 static void gen_delayed_conditional_jump(DisasContext
* ctx
)
366 l1
= gen_new_label();
368 tcg_gen_andi_i32(ds
, cpu_flags
, DELAY_SLOT_TRUE
);
369 tcg_gen_brcondi_i32(TCG_COND_EQ
, ds
, DELAY_SLOT_TRUE
, l1
);
370 gen_goto_tb(ctx
, 1, ctx
->pc
+ 2);
372 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, ~DELAY_SLOT_TRUE
);
376 static inline void gen_set_t(void)
378 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
381 static inline void gen_clr_t(void)
383 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
386 static inline void gen_cmp(int cond
, TCGv t0
, TCGv t1
)
388 int label1
= gen_new_label();
389 int label2
= gen_new_label();
390 tcg_gen_brcond_i32(cond
, t1
, t0
, label1
);
393 gen_set_label(label1
);
395 gen_set_label(label2
);
398 static inline void gen_cmp_imm(int cond
, TCGv t0
, int32_t imm
)
400 int label1
= gen_new_label();
401 int label2
= gen_new_label();
402 tcg_gen_brcondi_i32(cond
, t0
, imm
, label1
);
405 gen_set_label(label1
);
407 gen_set_label(label2
);
410 static inline void gen_store_flags(uint32_t flags
)
412 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
413 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, flags
);
416 static inline void gen_copy_bit_i32(TCGv t0
, int p0
, TCGv t1
, int p1
)
418 TCGv tmp
= tcg_temp_new();
423 tcg_gen_andi_i32(tmp
, t1
, (1 << p1
));
424 tcg_gen_andi_i32(t0
, t0
, ~(1 << p0
));
426 tcg_gen_shri_i32(tmp
, tmp
, p1
- p0
);
428 tcg_gen_shli_i32(tmp
, tmp
, p0
- p1
);
429 tcg_gen_or_i32(t0
, t0
, tmp
);
434 static inline void gen_load_fpr64(TCGv_i64 t
, int reg
)
436 tcg_gen_concat_i32_i64(t
, cpu_fregs
[reg
+ 1], cpu_fregs
[reg
]);
439 static inline void gen_store_fpr64 (TCGv_i64 t
, int reg
)
441 TCGv_i32 tmp
= tcg_temp_new_i32();
442 tcg_gen_trunc_i64_i32(tmp
, t
);
443 tcg_gen_mov_i32(cpu_fregs
[reg
+ 1], tmp
);
444 tcg_gen_shri_i64(t
, t
, 32);
445 tcg_gen_trunc_i64_i32(tmp
, t
);
446 tcg_gen_mov_i32(cpu_fregs
[reg
], tmp
);
447 tcg_temp_free_i32(tmp
);
450 #define B3_0 (ctx->opcode & 0xf)
451 #define B6_4 ((ctx->opcode >> 4) & 0x7)
452 #define B7_4 ((ctx->opcode >> 4) & 0xf)
453 #define B7_0 (ctx->opcode & 0xff)
454 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
455 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
456 (ctx->opcode & 0xfff))
457 #define B11_8 ((ctx->opcode >> 8) & 0xf)
458 #define B15_12 ((ctx->opcode >> 12) & 0xf)
460 #define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
461 (cpu_gregs[x + 16]) : (cpu_gregs[x]))
463 #define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
464 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
466 #define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
467 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
468 #define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
469 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
471 #define CHECK_NOT_DELAY_SLOT \
472 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) \
474 tcg_gen_movi_i32(cpu_pc, ctx->pc); \
475 gen_helper_raise_slot_illegal_instruction(); \
476 ctx->bstate = BS_EXCP; \
480 #define CHECK_PRIVILEGED \
481 if (IS_USER(ctx)) { \
482 tcg_gen_movi_i32(cpu_pc, ctx->pc); \
483 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
484 gen_helper_raise_slot_illegal_instruction(); \
486 gen_helper_raise_illegal_instruction(); \
488 ctx->bstate = BS_EXCP; \
492 #define CHECK_FPU_ENABLED \
493 if (ctx->flags & SR_FD) { \
494 tcg_gen_movi_i32(cpu_pc, ctx->pc); \
495 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
496 gen_helper_raise_slot_fpu_disable(); \
498 gen_helper_raise_fpu_disable(); \
500 ctx->bstate = BS_EXCP; \
504 static void _decode_opc(DisasContext
* ctx
)
506 /* This code tries to make movcal emulation sufficiently
507 accurate for Linux purposes. This instruction writes
508 memory, and prior to that, always allocates a cache line.
509 It is used in two contexts:
510 - in memcpy, where data is copied in blocks, the first write
511 of to a block uses movca.l for performance.
512 - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
513 to flush the cache. Here, the data written by movcal.l is never
514 written to memory, and the data written is just bogus.
516 To simulate this, we simulate movcal.l, we store the value to memory,
517 but we also remember the previous content. If we see ocbi, we check
518 if movcal.l for that address was done previously. If so, the write should
519 not have hit the memory, so we restore the previous content.
520 When we see an instruction that is neither movca.l
521 nor ocbi, the previous content is discarded.
523 To optimize, we only try to flush stores when we're at the start of
524 TB, or if we already saw movca.l in this TB and did not flush stores
528 int opcode
= ctx
->opcode
& 0xf0ff;
529 if (opcode
!= 0x0093 /* ocbi */
530 && opcode
!= 0x00c3 /* movca.l */)
532 gen_helper_discard_movcal_backup ();
538 fprintf(stderr
, "Translating opcode 0x%04x\n", ctx
->opcode
);
541 switch (ctx
->opcode
) {
542 case 0x0019: /* div0u */
543 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~(SR_M
| SR_Q
| SR_T
));
545 case 0x000b: /* rts */
547 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_pr
);
548 ctx
->flags
|= DELAY_SLOT
;
549 ctx
->delayed_pc
= (uint32_t) - 1;
551 case 0x0028: /* clrmac */
552 tcg_gen_movi_i32(cpu_mach
, 0);
553 tcg_gen_movi_i32(cpu_macl
, 0);
555 case 0x0048: /* clrs */
556 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_S
);
558 case 0x0008: /* clrt */
561 case 0x0038: /* ldtlb */
565 case 0x002b: /* rte */
568 tcg_gen_mov_i32(cpu_sr
, cpu_ssr
);
569 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_spc
);
570 ctx
->flags
|= DELAY_SLOT
;
571 ctx
->delayed_pc
= (uint32_t) - 1;
573 case 0x0058: /* sets */
574 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_S
);
576 case 0x0018: /* sett */
579 case 0xfbfd: /* frchg */
580 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_FR
);
581 ctx
->bstate
= BS_STOP
;
583 case 0xf3fd: /* fschg */
584 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_SZ
);
585 ctx
->bstate
= BS_STOP
;
587 case 0x0009: /* nop */
589 case 0x001b: /* sleep */
591 gen_helper_sleep(tcg_const_i32(ctx
->pc
+ 2));
595 switch (ctx
->opcode
& 0xf000) {
596 case 0x1000: /* mov.l Rm,@(disp,Rn) */
598 TCGv addr
= tcg_temp_new();
599 tcg_gen_addi_i32(addr
, REG(B11_8
), B3_0
* 4);
600 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
604 case 0x5000: /* mov.l @(disp,Rm),Rn */
606 TCGv addr
= tcg_temp_new();
607 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 4);
608 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
612 case 0xe000: /* mov #imm,Rn */
613 tcg_gen_movi_i32(REG(B11_8
), B7_0s
);
615 case 0x9000: /* mov.w @(disp,PC),Rn */
617 TCGv addr
= tcg_const_i32(ctx
->pc
+ 4 + B7_0
* 2);
618 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
622 case 0xd000: /* mov.l @(disp,PC),Rn */
624 TCGv addr
= tcg_const_i32((ctx
->pc
+ 4 + B7_0
* 4) & ~3);
625 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
629 case 0x7000: /* add #imm,Rn */
630 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), B7_0s
);
632 case 0xa000: /* bra disp */
634 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
635 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
636 ctx
->flags
|= DELAY_SLOT
;
638 case 0xb000: /* bsr disp */
640 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
641 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
642 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
643 ctx
->flags
|= DELAY_SLOT
;
647 switch (ctx
->opcode
& 0xf00f) {
648 case 0x6003: /* mov Rm,Rn */
649 tcg_gen_mov_i32(REG(B11_8
), REG(B7_4
));
651 case 0x2000: /* mov.b Rm,@Rn */
652 tcg_gen_qemu_st8(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
654 case 0x2001: /* mov.w Rm,@Rn */
655 tcg_gen_qemu_st16(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
657 case 0x2002: /* mov.l Rm,@Rn */
658 tcg_gen_qemu_st32(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
660 case 0x6000: /* mov.b @Rm,Rn */
661 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
663 case 0x6001: /* mov.w @Rm,Rn */
664 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
666 case 0x6002: /* mov.l @Rm,Rn */
667 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
669 case 0x2004: /* mov.b Rm,@-Rn */
671 TCGv addr
= tcg_temp_new();
672 tcg_gen_subi_i32(addr
, REG(B11_8
), 1);
673 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
); /* might cause re-execution */
674 tcg_gen_mov_i32(REG(B11_8
), addr
); /* modify register status */
678 case 0x2005: /* mov.w Rm,@-Rn */
680 TCGv addr
= tcg_temp_new();
681 tcg_gen_subi_i32(addr
, REG(B11_8
), 2);
682 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
683 tcg_gen_mov_i32(REG(B11_8
), addr
);
687 case 0x2006: /* mov.l Rm,@-Rn */
689 TCGv addr
= tcg_temp_new();
690 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
691 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
692 tcg_gen_mov_i32(REG(B11_8
), addr
);
695 case 0x6004: /* mov.b @Rm+,Rn */
696 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
698 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 1);
700 case 0x6005: /* mov.w @Rm+,Rn */
701 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
703 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
705 case 0x6006: /* mov.l @Rm+,Rn */
706 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
708 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
710 case 0x0004: /* mov.b Rm,@(R0,Rn) */
712 TCGv addr
= tcg_temp_new();
713 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
714 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
);
718 case 0x0005: /* mov.w Rm,@(R0,Rn) */
720 TCGv addr
= tcg_temp_new();
721 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
722 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
726 case 0x0006: /* mov.l Rm,@(R0,Rn) */
728 TCGv addr
= tcg_temp_new();
729 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
730 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
734 case 0x000c: /* mov.b @(R0,Rm),Rn */
736 TCGv addr
= tcg_temp_new();
737 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
738 tcg_gen_qemu_ld8s(REG(B11_8
), addr
, ctx
->memidx
);
742 case 0x000d: /* mov.w @(R0,Rm),Rn */
744 TCGv addr
= tcg_temp_new();
745 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
746 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
750 case 0x000e: /* mov.l @(R0,Rm),Rn */
752 TCGv addr
= tcg_temp_new();
753 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
754 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
758 case 0x6008: /* swap.b Rm,Rn */
761 high
= tcg_temp_new();
762 tcg_gen_andi_i32(high
, REG(B7_4
), 0xffff0000);
763 low
= tcg_temp_new();
764 tcg_gen_ext16u_i32(low
, REG(B7_4
));
765 tcg_gen_bswap16_i32(low
, low
);
766 tcg_gen_or_i32(REG(B11_8
), high
, low
);
771 case 0x6009: /* swap.w Rm,Rn */
774 high
= tcg_temp_new();
775 tcg_gen_shli_i32(high
, REG(B7_4
), 16);
776 low
= tcg_temp_new();
777 tcg_gen_shri_i32(low
, REG(B7_4
), 16);
778 tcg_gen_ext16u_i32(low
, low
);
779 tcg_gen_or_i32(REG(B11_8
), high
, low
);
784 case 0x200d: /* xtrct Rm,Rn */
787 high
= tcg_temp_new();
788 tcg_gen_shli_i32(high
, REG(B7_4
), 16);
789 low
= tcg_temp_new();
790 tcg_gen_shri_i32(low
, REG(B11_8
), 16);
791 tcg_gen_ext16u_i32(low
, low
);
792 tcg_gen_or_i32(REG(B11_8
), high
, low
);
797 case 0x300c: /* add Rm,Rn */
798 tcg_gen_add_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
800 case 0x300e: /* addc Rm,Rn */
801 gen_helper_addc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
803 case 0x300f: /* addv Rm,Rn */
804 gen_helper_addv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
806 case 0x2009: /* and Rm,Rn */
807 tcg_gen_and_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
809 case 0x3000: /* cmp/eq Rm,Rn */
810 gen_cmp(TCG_COND_EQ
, REG(B7_4
), REG(B11_8
));
812 case 0x3003: /* cmp/ge Rm,Rn */
813 gen_cmp(TCG_COND_GE
, REG(B7_4
), REG(B11_8
));
815 case 0x3007: /* cmp/gt Rm,Rn */
816 gen_cmp(TCG_COND_GT
, REG(B7_4
), REG(B11_8
));
818 case 0x3006: /* cmp/hi Rm,Rn */
819 gen_cmp(TCG_COND_GTU
, REG(B7_4
), REG(B11_8
));
821 case 0x3002: /* cmp/hs Rm,Rn */
822 gen_cmp(TCG_COND_GEU
, REG(B7_4
), REG(B11_8
));
824 case 0x200c: /* cmp/str Rm,Rn */
826 int label1
= gen_new_label();
827 int label2
= gen_new_label();
828 TCGv cmp1
= tcg_temp_local_new();
829 TCGv cmp2
= tcg_temp_local_new();
830 tcg_gen_xor_i32(cmp1
, REG(B7_4
), REG(B11_8
));
831 tcg_gen_andi_i32(cmp2
, cmp1
, 0xff000000);
832 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
833 tcg_gen_andi_i32(cmp2
, cmp1
, 0x00ff0000);
834 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
835 tcg_gen_andi_i32(cmp2
, cmp1
, 0x0000ff00);
836 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
837 tcg_gen_andi_i32(cmp2
, cmp1
, 0x000000ff);
838 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
839 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
841 gen_set_label(label1
);
842 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
843 gen_set_label(label2
);
848 case 0x2007: /* div0s Rm,Rn */
850 gen_copy_bit_i32(cpu_sr
, 8, REG(B11_8
), 31); /* SR_Q */
851 gen_copy_bit_i32(cpu_sr
, 9, REG(B7_4
), 31); /* SR_M */
852 TCGv val
= tcg_temp_new();
853 tcg_gen_xor_i32(val
, REG(B7_4
), REG(B11_8
));
854 gen_copy_bit_i32(cpu_sr
, 0, val
, 31); /* SR_T */
858 case 0x3004: /* div1 Rm,Rn */
859 gen_helper_div1(REG(B11_8
), REG(B7_4
), REG(B11_8
));
861 case 0x300d: /* dmuls.l Rm,Rn */
863 TCGv_i64 tmp1
= tcg_temp_new_i64();
864 TCGv_i64 tmp2
= tcg_temp_new_i64();
866 tcg_gen_ext_i32_i64(tmp1
, REG(B7_4
));
867 tcg_gen_ext_i32_i64(tmp2
, REG(B11_8
));
868 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
869 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
870 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
871 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
873 tcg_temp_free_i64(tmp2
);
874 tcg_temp_free_i64(tmp1
);
877 case 0x3005: /* dmulu.l Rm,Rn */
879 TCGv_i64 tmp1
= tcg_temp_new_i64();
880 TCGv_i64 tmp2
= tcg_temp_new_i64();
882 tcg_gen_extu_i32_i64(tmp1
, REG(B7_4
));
883 tcg_gen_extu_i32_i64(tmp2
, REG(B11_8
));
884 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
885 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
886 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
887 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
889 tcg_temp_free_i64(tmp2
);
890 tcg_temp_free_i64(tmp1
);
893 case 0x600e: /* exts.b Rm,Rn */
894 tcg_gen_ext8s_i32(REG(B11_8
), REG(B7_4
));
896 case 0x600f: /* exts.w Rm,Rn */
897 tcg_gen_ext16s_i32(REG(B11_8
), REG(B7_4
));
899 case 0x600c: /* extu.b Rm,Rn */
900 tcg_gen_ext8u_i32(REG(B11_8
), REG(B7_4
));
902 case 0x600d: /* extu.w Rm,Rn */
903 tcg_gen_ext16u_i32(REG(B11_8
), REG(B7_4
));
905 case 0x000f: /* mac.l @Rm+,@Rn+ */
908 arg0
= tcg_temp_new();
909 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
910 arg1
= tcg_temp_new();
911 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
912 gen_helper_macl(arg0
, arg1
);
915 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
916 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
919 case 0x400f: /* mac.w @Rm+,@Rn+ */
922 arg0
= tcg_temp_new();
923 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
924 arg1
= tcg_temp_new();
925 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
926 gen_helper_macw(arg0
, arg1
);
929 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 2);
930 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
933 case 0x0007: /* mul.l Rm,Rn */
934 tcg_gen_mul_i32(cpu_macl
, REG(B7_4
), REG(B11_8
));
936 case 0x200f: /* muls.w Rm,Rn */
939 arg0
= tcg_temp_new();
940 tcg_gen_ext16s_i32(arg0
, REG(B7_4
));
941 arg1
= tcg_temp_new();
942 tcg_gen_ext16s_i32(arg1
, REG(B11_8
));
943 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
948 case 0x200e: /* mulu.w Rm,Rn */
951 arg0
= tcg_temp_new();
952 tcg_gen_ext16u_i32(arg0
, REG(B7_4
));
953 arg1
= tcg_temp_new();
954 tcg_gen_ext16u_i32(arg1
, REG(B11_8
));
955 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
960 case 0x600b: /* neg Rm,Rn */
961 tcg_gen_neg_i32(REG(B11_8
), REG(B7_4
));
963 case 0x600a: /* negc Rm,Rn */
964 gen_helper_negc(REG(B11_8
), REG(B7_4
));
966 case 0x6007: /* not Rm,Rn */
967 tcg_gen_not_i32(REG(B11_8
), REG(B7_4
));
969 case 0x200b: /* or Rm,Rn */
970 tcg_gen_or_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
972 case 0x400c: /* shad Rm,Rn */
974 int label1
= gen_new_label();
975 int label2
= gen_new_label();
976 int label3
= gen_new_label();
977 int label4
= gen_new_label();
979 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
980 /* Rm positive, shift to the left */
981 shift
= tcg_temp_new();
982 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
983 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
984 tcg_temp_free(shift
);
986 /* Rm negative, shift to the right */
987 gen_set_label(label1
);
988 shift
= tcg_temp_new();
989 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
990 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
991 tcg_gen_not_i32(shift
, REG(B7_4
));
992 tcg_gen_andi_i32(shift
, shift
, 0x1f);
993 tcg_gen_addi_i32(shift
, shift
, 1);
994 tcg_gen_sar_i32(REG(B11_8
), REG(B11_8
), shift
);
995 tcg_temp_free(shift
);
998 gen_set_label(label2
);
999 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B11_8
), 0, label3
);
1000 tcg_gen_movi_i32(REG(B11_8
), 0);
1002 gen_set_label(label3
);
1003 tcg_gen_movi_i32(REG(B11_8
), 0xffffffff);
1004 gen_set_label(label4
);
1007 case 0x400d: /* shld Rm,Rn */
1009 int label1
= gen_new_label();
1010 int label2
= gen_new_label();
1011 int label3
= gen_new_label();
1013 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
1014 /* Rm positive, shift to the left */
1015 shift
= tcg_temp_new();
1016 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1017 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
1018 tcg_temp_free(shift
);
1020 /* Rm negative, shift to the right */
1021 gen_set_label(label1
);
1022 shift
= tcg_temp_new();
1023 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1024 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
1025 tcg_gen_not_i32(shift
, REG(B7_4
));
1026 tcg_gen_andi_i32(shift
, shift
, 0x1f);
1027 tcg_gen_addi_i32(shift
, shift
, 1);
1028 tcg_gen_shr_i32(REG(B11_8
), REG(B11_8
), shift
);
1029 tcg_temp_free(shift
);
1032 gen_set_label(label2
);
1033 tcg_gen_movi_i32(REG(B11_8
), 0);
1034 gen_set_label(label3
);
1037 case 0x3008: /* sub Rm,Rn */
1038 tcg_gen_sub_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1040 case 0x300a: /* subc Rm,Rn */
1041 gen_helper_subc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1043 case 0x300b: /* subv Rm,Rn */
1044 gen_helper_subv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1046 case 0x2008: /* tst Rm,Rn */
1048 TCGv val
= tcg_temp_new();
1049 tcg_gen_and_i32(val
, REG(B7_4
), REG(B11_8
));
1050 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1054 case 0x200a: /* xor Rm,Rn */
1055 tcg_gen_xor_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1057 case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1059 if (ctx
->fpscr
& FPSCR_SZ
) {
1060 TCGv_i64 fp
= tcg_temp_new_i64();
1061 gen_load_fpr64(fp
, XREG(B7_4
));
1062 gen_store_fpr64(fp
, XREG(B11_8
));
1063 tcg_temp_free_i64(fp
);
1065 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1068 case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1070 if (ctx
->fpscr
& FPSCR_SZ
) {
1071 TCGv addr_hi
= tcg_temp_new();
1072 int fr
= XREG(B7_4
);
1073 tcg_gen_addi_i32(addr_hi
, REG(B11_8
), 4);
1074 tcg_gen_qemu_st32(cpu_fregs
[fr
], REG(B11_8
), ctx
->memidx
);
1075 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1076 tcg_temp_free(addr_hi
);
1078 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], REG(B11_8
), ctx
->memidx
);
1081 case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1083 if (ctx
->fpscr
& FPSCR_SZ
) {
1084 TCGv addr_hi
= tcg_temp_new();
1085 int fr
= XREG(B11_8
);
1086 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1087 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1088 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1089 tcg_temp_free(addr_hi
);
1091 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1094 case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1096 if (ctx
->fpscr
& FPSCR_SZ
) {
1097 TCGv addr_hi
= tcg_temp_new();
1098 int fr
= XREG(B11_8
);
1099 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1100 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1101 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1102 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 8);
1103 tcg_temp_free(addr_hi
);
1105 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1106 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
1109 case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1111 if (ctx
->fpscr
& FPSCR_SZ
) {
1112 TCGv addr
= tcg_temp_new_i32();
1113 int fr
= XREG(B7_4
);
1114 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1115 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1116 tcg_gen_subi_i32(addr
, addr
, 4);
1117 tcg_gen_qemu_st32(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1118 tcg_gen_mov_i32(REG(B11_8
), addr
);
1119 tcg_temp_free(addr
);
1122 addr
= tcg_temp_new_i32();
1123 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1124 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1125 tcg_gen_mov_i32(REG(B11_8
), addr
);
1126 tcg_temp_free(addr
);
1129 case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1132 TCGv addr
= tcg_temp_new_i32();
1133 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
1134 if (ctx
->fpscr
& FPSCR_SZ
) {
1135 int fr
= XREG(B11_8
);
1136 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1137 tcg_gen_addi_i32(addr
, addr
, 4);
1138 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1140 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], addr
, ctx
->memidx
);
1142 tcg_temp_free(addr
);
1145 case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
1148 TCGv addr
= tcg_temp_new();
1149 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
1150 if (ctx
->fpscr
& FPSCR_SZ
) {
1151 int fr
= XREG(B7_4
);
1152 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1153 tcg_gen_addi_i32(addr
, addr
, 4);
1154 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1156 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1158 tcg_temp_free(addr
);
1161 case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1162 case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1163 case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1164 case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1165 case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1166 case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1169 if (ctx
->fpscr
& FPSCR_PR
) {
1172 if (ctx
->opcode
& 0x0110)
1173 break; /* illegal instruction */
1174 fp0
= tcg_temp_new_i64();
1175 fp1
= tcg_temp_new_i64();
1176 gen_load_fpr64(fp0
, DREG(B11_8
));
1177 gen_load_fpr64(fp1
, DREG(B7_4
));
1178 switch (ctx
->opcode
& 0xf00f) {
1179 case 0xf000: /* fadd Rm,Rn */
1180 gen_helper_fadd_DT(fp0
, fp0
, fp1
);
1182 case 0xf001: /* fsub Rm,Rn */
1183 gen_helper_fsub_DT(fp0
, fp0
, fp1
);
1185 case 0xf002: /* fmul Rm,Rn */
1186 gen_helper_fmul_DT(fp0
, fp0
, fp1
);
1188 case 0xf003: /* fdiv Rm,Rn */
1189 gen_helper_fdiv_DT(fp0
, fp0
, fp1
);
1191 case 0xf004: /* fcmp/eq Rm,Rn */
1192 gen_helper_fcmp_eq_DT(fp0
, fp1
);
1194 case 0xf005: /* fcmp/gt Rm,Rn */
1195 gen_helper_fcmp_gt_DT(fp0
, fp1
);
1198 gen_store_fpr64(fp0
, DREG(B11_8
));
1199 tcg_temp_free_i64(fp0
);
1200 tcg_temp_free_i64(fp1
);
1202 switch (ctx
->opcode
& 0xf00f) {
1203 case 0xf000: /* fadd Rm,Rn */
1204 gen_helper_fadd_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1206 case 0xf001: /* fsub Rm,Rn */
1207 gen_helper_fsub_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1209 case 0xf002: /* fmul Rm,Rn */
1210 gen_helper_fmul_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1212 case 0xf003: /* fdiv Rm,Rn */
1213 gen_helper_fdiv_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1215 case 0xf004: /* fcmp/eq Rm,Rn */
1216 gen_helper_fcmp_eq_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1218 case 0xf005: /* fcmp/gt Rm,Rn */
1219 gen_helper_fcmp_gt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1225 case 0xf00e: /* fmac FR0,RM,Rn */
1228 if (ctx
->fpscr
& FPSCR_PR
) {
1229 break; /* illegal instruction */
1231 gen_helper_fmac_FT(cpu_fregs
[FREG(B11_8
)],
1232 cpu_fregs
[FREG(0)], cpu_fregs
[FREG(B7_4
)], cpu_fregs
[FREG(B11_8
)]);
1238 switch (ctx
->opcode
& 0xff00) {
1239 case 0xc900: /* and #imm,R0 */
1240 tcg_gen_andi_i32(REG(0), REG(0), B7_0
);
1242 case 0xcd00: /* and.b #imm,@(R0,GBR) */
1245 addr
= tcg_temp_new();
1246 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1247 val
= tcg_temp_new();
1248 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1249 tcg_gen_andi_i32(val
, val
, B7_0
);
1250 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1252 tcg_temp_free(addr
);
1255 case 0x8b00: /* bf label */
1256 CHECK_NOT_DELAY_SLOT
1257 gen_conditional_jump(ctx
, ctx
->pc
+ 2,
1258 ctx
->pc
+ 4 + B7_0s
* 2);
1259 ctx
->bstate
= BS_BRANCH
;
1261 case 0x8f00: /* bf/s label */
1262 CHECK_NOT_DELAY_SLOT
1263 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 0);
1264 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1266 case 0x8900: /* bt label */
1267 CHECK_NOT_DELAY_SLOT
1268 gen_conditional_jump(ctx
, ctx
->pc
+ 4 + B7_0s
* 2,
1270 ctx
->bstate
= BS_BRANCH
;
1272 case 0x8d00: /* bt/s label */
1273 CHECK_NOT_DELAY_SLOT
1274 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 1);
1275 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1277 case 0x8800: /* cmp/eq #imm,R0 */
1278 gen_cmp_imm(TCG_COND_EQ
, REG(0), B7_0s
);
1280 case 0xc400: /* mov.b @(disp,GBR),R0 */
1282 TCGv addr
= tcg_temp_new();
1283 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1284 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1285 tcg_temp_free(addr
);
1288 case 0xc500: /* mov.w @(disp,GBR),R0 */
1290 TCGv addr
= tcg_temp_new();
1291 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1292 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1293 tcg_temp_free(addr
);
1296 case 0xc600: /* mov.l @(disp,GBR),R0 */
1298 TCGv addr
= tcg_temp_new();
1299 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1300 tcg_gen_qemu_ld32s(REG(0), addr
, ctx
->memidx
);
1301 tcg_temp_free(addr
);
1304 case 0xc000: /* mov.b R0,@(disp,GBR) */
1306 TCGv addr
= tcg_temp_new();
1307 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1308 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1309 tcg_temp_free(addr
);
1312 case 0xc100: /* mov.w R0,@(disp,GBR) */
1314 TCGv addr
= tcg_temp_new();
1315 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1316 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1317 tcg_temp_free(addr
);
1320 case 0xc200: /* mov.l R0,@(disp,GBR) */
1322 TCGv addr
= tcg_temp_new();
1323 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1324 tcg_gen_qemu_st32(REG(0), addr
, ctx
->memidx
);
1325 tcg_temp_free(addr
);
1328 case 0x8000: /* mov.b R0,@(disp,Rn) */
1330 TCGv addr
= tcg_temp_new();
1331 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1332 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1333 tcg_temp_free(addr
);
1336 case 0x8100: /* mov.w R0,@(disp,Rn) */
1338 TCGv addr
= tcg_temp_new();
1339 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1340 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1341 tcg_temp_free(addr
);
1344 case 0x8400: /* mov.b @(disp,Rn),R0 */
1346 TCGv addr
= tcg_temp_new();
1347 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1348 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1349 tcg_temp_free(addr
);
1352 case 0x8500: /* mov.w @(disp,Rn),R0 */
1354 TCGv addr
= tcg_temp_new();
1355 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1356 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1357 tcg_temp_free(addr
);
1360 case 0xc700: /* mova @(disp,PC),R0 */
1361 tcg_gen_movi_i32(REG(0), ((ctx
->pc
& 0xfffffffc) + 4 + B7_0
* 4) & ~3);
1363 case 0xcb00: /* or #imm,R0 */
1364 tcg_gen_ori_i32(REG(0), REG(0), B7_0
);
1366 case 0xcf00: /* or.b #imm,@(R0,GBR) */
1369 addr
= tcg_temp_new();
1370 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1371 val
= tcg_temp_new();
1372 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1373 tcg_gen_ori_i32(val
, val
, B7_0
);
1374 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1376 tcg_temp_free(addr
);
1379 case 0xc300: /* trapa #imm */
1382 CHECK_NOT_DELAY_SLOT
1383 tcg_gen_movi_i32(cpu_pc
, ctx
->pc
);
1384 imm
= tcg_const_i32(B7_0
);
1385 gen_helper_trapa(imm
);
1387 ctx
->bstate
= BS_BRANCH
;
1390 case 0xc800: /* tst #imm,R0 */
1392 TCGv val
= tcg_temp_new();
1393 tcg_gen_andi_i32(val
, REG(0), B7_0
);
1394 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1398 case 0xcc00: /* tst.b #imm,@(R0,GBR) */
1400 TCGv val
= tcg_temp_new();
1401 tcg_gen_add_i32(val
, REG(0), cpu_gbr
);
1402 tcg_gen_qemu_ld8u(val
, val
, ctx
->memidx
);
1403 tcg_gen_andi_i32(val
, val
, B7_0
);
1404 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1408 case 0xca00: /* xor #imm,R0 */
1409 tcg_gen_xori_i32(REG(0), REG(0), B7_0
);
1411 case 0xce00: /* xor.b #imm,@(R0,GBR) */
1414 addr
= tcg_temp_new();
1415 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1416 val
= tcg_temp_new();
1417 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1418 tcg_gen_xori_i32(val
, val
, B7_0
);
1419 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1421 tcg_temp_free(addr
);
1426 switch (ctx
->opcode
& 0xf08f) {
1427 case 0x408e: /* ldc Rm,Rn_BANK */
1429 tcg_gen_mov_i32(ALTREG(B6_4
), REG(B11_8
));
1431 case 0x4087: /* ldc.l @Rm+,Rn_BANK */
1433 tcg_gen_qemu_ld32s(ALTREG(B6_4
), REG(B11_8
), ctx
->memidx
);
1434 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1436 case 0x0082: /* stc Rm_BANK,Rn */
1438 tcg_gen_mov_i32(REG(B11_8
), ALTREG(B6_4
));
1440 case 0x4083: /* stc.l Rm_BANK,@-Rn */
1443 TCGv addr
= tcg_temp_new();
1444 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1445 tcg_gen_qemu_st32(ALTREG(B6_4
), addr
, ctx
->memidx
);
1446 tcg_gen_mov_i32(REG(B11_8
), addr
);
1447 tcg_temp_free(addr
);
1452 switch (ctx
->opcode
& 0xf0ff) {
1453 case 0x0023: /* braf Rn */
1454 CHECK_NOT_DELAY_SLOT
1455 tcg_gen_addi_i32(cpu_delayed_pc
, REG(B11_8
), ctx
->pc
+ 4);
1456 ctx
->flags
|= DELAY_SLOT
;
1457 ctx
->delayed_pc
= (uint32_t) - 1;
1459 case 0x0003: /* bsrf Rn */
1460 CHECK_NOT_DELAY_SLOT
1461 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1462 tcg_gen_add_i32(cpu_delayed_pc
, REG(B11_8
), cpu_pr
);
1463 ctx
->flags
|= DELAY_SLOT
;
1464 ctx
->delayed_pc
= (uint32_t) - 1;
1466 case 0x4015: /* cmp/pl Rn */
1467 gen_cmp_imm(TCG_COND_GT
, REG(B11_8
), 0);
1469 case 0x4011: /* cmp/pz Rn */
1470 gen_cmp_imm(TCG_COND_GE
, REG(B11_8
), 0);
1472 case 0x4010: /* dt Rn */
1473 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 1);
1474 gen_cmp_imm(TCG_COND_EQ
, REG(B11_8
), 0);
1476 case 0x402b: /* jmp @Rn */
1477 CHECK_NOT_DELAY_SLOT
1478 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1479 ctx
->flags
|= DELAY_SLOT
;
1480 ctx
->delayed_pc
= (uint32_t) - 1;
1482 case 0x400b: /* jsr @Rn */
1483 CHECK_NOT_DELAY_SLOT
1484 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1485 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1486 ctx
->flags
|= DELAY_SLOT
;
1487 ctx
->delayed_pc
= (uint32_t) - 1;
1489 case 0x400e: /* ldc Rm,SR */
1491 tcg_gen_andi_i32(cpu_sr
, REG(B11_8
), 0x700083f3);
1492 ctx
->bstate
= BS_STOP
;
1494 case 0x4007: /* ldc.l @Rm+,SR */
1497 TCGv val
= tcg_temp_new();
1498 tcg_gen_qemu_ld32s(val
, REG(B11_8
), ctx
->memidx
);
1499 tcg_gen_andi_i32(cpu_sr
, val
, 0x700083f3);
1501 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1502 ctx
->bstate
= BS_STOP
;
1505 case 0x0002: /* stc SR,Rn */
1507 tcg_gen_mov_i32(REG(B11_8
), cpu_sr
);
1509 case 0x4003: /* stc SR,@-Rn */
1512 TCGv addr
= tcg_temp_new();
1513 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1514 tcg_gen_qemu_st32(cpu_sr
, addr
, ctx
->memidx
);
1515 tcg_gen_mov_i32(REG(B11_8
), addr
);
1516 tcg_temp_free(addr
);
1519 #define LD(reg,ldnum,ldpnum,prechk) \
1522 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1526 tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
1527 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1529 #define ST(reg,stnum,stpnum,prechk) \
1532 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1537 TCGv addr = tcg_temp_new(); \
1538 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1539 tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
1540 tcg_gen_mov_i32(REG(B11_8), addr); \
1541 tcg_temp_free(addr); \
1544 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1545 LD(reg,ldnum,ldpnum,prechk) \
1546 ST(reg,stnum,stpnum,prechk)
1547 LDST(gbr
, 0x401e, 0x4017, 0x0012, 0x4013, {})
1548 LDST(vbr
, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED
)
1549 LDST(ssr
, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED
)
1550 LDST(spc
, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED
)
1551 ST(sgr
, 0x003a, 0x4032, CHECK_PRIVILEGED
)
1552 LD(sgr
, 0x403a, 0x4036, CHECK_PRIVILEGED
if (!(ctx
->features
& SH_FEATURE_SH4A
)) break;)
1553 LDST(dbr
, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED
)
1554 LDST(mach
, 0x400a, 0x4006, 0x000a, 0x4002, {})
1555 LDST(macl
, 0x401a, 0x4016, 0x001a, 0x4012, {})
1556 LDST(pr
, 0x402a, 0x4026, 0x002a, 0x4022, {})
1557 LDST(fpul
, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED
})
1558 case 0x406a: /* lds Rm,FPSCR */
1560 gen_helper_ld_fpscr(REG(B11_8
));
1561 ctx
->bstate
= BS_STOP
;
1563 case 0x4066: /* lds.l @Rm+,FPSCR */
1566 TCGv addr
= tcg_temp_new();
1567 tcg_gen_qemu_ld32s(addr
, REG(B11_8
), ctx
->memidx
);
1568 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1569 gen_helper_ld_fpscr(addr
);
1570 tcg_temp_free(addr
);
1571 ctx
->bstate
= BS_STOP
;
1574 case 0x006a: /* sts FPSCR,Rn */
1576 tcg_gen_andi_i32(REG(B11_8
), cpu_fpscr
, 0x003fffff);
1578 case 0x4062: /* sts FPSCR,@-Rn */
1582 val
= tcg_temp_new();
1583 tcg_gen_andi_i32(val
, cpu_fpscr
, 0x003fffff);
1584 addr
= tcg_temp_new();
1585 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1586 tcg_gen_qemu_st32(val
, addr
, ctx
->memidx
);
1587 tcg_gen_mov_i32(REG(B11_8
), addr
);
1588 tcg_temp_free(addr
);
1592 case 0x00c3: /* movca.l R0,@Rm */
1594 TCGv val
= tcg_temp_new();
1595 tcg_gen_qemu_ld32u(val
, REG(B11_8
), ctx
->memidx
);
1596 gen_helper_movcal (REG(B11_8
), val
);
1597 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1599 ctx
->has_movcal
= 1;
1602 /* MOVUA.L @Rm,R0 (Rm) -> R0
1603 Load non-boundary-aligned data */
1604 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1607 /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
1608 Load non-boundary-aligned data */
1609 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1610 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1612 case 0x0029: /* movt Rn */
1613 tcg_gen_andi_i32(REG(B11_8
), cpu_sr
, SR_T
);
1618 If (T == 1) R0 -> (Rn)
1621 if (ctx
->features
& SH_FEATURE_SH4A
) {
1622 int label
= gen_new_label();
1624 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cpu_ldst
);
1625 tcg_gen_brcondi_i32(TCG_COND_EQ
, cpu_ldst
, 0, label
);
1626 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1627 gen_set_label(label
);
1628 tcg_gen_movi_i32(cpu_ldst
, 0);
1636 When interrupt/exception
1639 if (ctx
->features
& SH_FEATURE_SH4A
) {
1640 tcg_gen_movi_i32(cpu_ldst
, 0);
1641 tcg_gen_qemu_ld32s(REG(0), REG(B11_8
), ctx
->memidx
);
1642 tcg_gen_movi_i32(cpu_ldst
, 1);
1646 case 0x0093: /* ocbi @Rn */
1648 gen_helper_ocbi (REG(B11_8
));
1651 case 0x00a3: /* ocbp @Rn */
1653 TCGv dummy
= tcg_temp_new();
1654 tcg_gen_qemu_ld32s(dummy
, REG(B11_8
), ctx
->memidx
);
1655 tcg_temp_free(dummy
);
1658 case 0x00b3: /* ocbwb @Rn */
1660 TCGv dummy
= tcg_temp_new();
1661 tcg_gen_qemu_ld32s(dummy
, REG(B11_8
), ctx
->memidx
);
1662 tcg_temp_free(dummy
);
1665 case 0x0083: /* pref @Rn */
1667 case 0x00d3: /* prefi @Rn */
1668 if (ctx
->features
& SH_FEATURE_SH4A
)
1672 case 0x00e3: /* icbi @Rn */
1673 if (ctx
->features
& SH_FEATURE_SH4A
)
1677 case 0x00ab: /* synco */
1678 if (ctx
->features
& SH_FEATURE_SH4A
)
1682 case 0x4024: /* rotcl Rn */
1684 TCGv tmp
= tcg_temp_new();
1685 tcg_gen_mov_i32(tmp
, cpu_sr
);
1686 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1687 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1688 gen_copy_bit_i32(REG(B11_8
), 0, tmp
, 0);
1692 case 0x4025: /* rotcr Rn */
1694 TCGv tmp
= tcg_temp_new();
1695 tcg_gen_mov_i32(tmp
, cpu_sr
);
1696 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1697 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1698 gen_copy_bit_i32(REG(B11_8
), 31, tmp
, 0);
1702 case 0x4004: /* rotl Rn */
1703 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1704 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1705 gen_copy_bit_i32(REG(B11_8
), 0, cpu_sr
, 0);
1707 case 0x4005: /* rotr Rn */
1708 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1709 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1710 gen_copy_bit_i32(REG(B11_8
), 31, cpu_sr
, 0);
1712 case 0x4000: /* shll Rn */
1713 case 0x4020: /* shal Rn */
1714 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1715 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1717 case 0x4021: /* shar Rn */
1718 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1719 tcg_gen_sari_i32(REG(B11_8
), REG(B11_8
), 1);
1721 case 0x4001: /* shlr Rn */
1722 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1723 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1725 case 0x4008: /* shll2 Rn */
1726 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 2);
1728 case 0x4018: /* shll8 Rn */
1729 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 8);
1731 case 0x4028: /* shll16 Rn */
1732 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 16);
1734 case 0x4009: /* shlr2 Rn */
1735 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 2);
1737 case 0x4019: /* shlr8 Rn */
1738 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 8);
1740 case 0x4029: /* shlr16 Rn */
1741 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 16);
1743 case 0x401b: /* tas.b @Rn */
1746 addr
= tcg_temp_local_new();
1747 tcg_gen_mov_i32(addr
, REG(B11_8
));
1748 val
= tcg_temp_local_new();
1749 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1750 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1751 tcg_gen_ori_i32(val
, val
, 0x80);
1752 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1754 tcg_temp_free(addr
);
1757 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1759 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1761 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1763 tcg_gen_mov_i32(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1765 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1767 if (ctx
->fpscr
& FPSCR_PR
) {
1769 if (ctx
->opcode
& 0x0100)
1770 break; /* illegal instruction */
1771 fp
= tcg_temp_new_i64();
1772 gen_helper_float_DT(fp
, cpu_fpul
);
1773 gen_store_fpr64(fp
, DREG(B11_8
));
1774 tcg_temp_free_i64(fp
);
1777 gen_helper_float_FT(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1780 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1782 if (ctx
->fpscr
& FPSCR_PR
) {
1784 if (ctx
->opcode
& 0x0100)
1785 break; /* illegal instruction */
1786 fp
= tcg_temp_new_i64();
1787 gen_load_fpr64(fp
, DREG(B11_8
));
1788 gen_helper_ftrc_DT(cpu_fpul
, fp
);
1789 tcg_temp_free_i64(fp
);
1792 gen_helper_ftrc_FT(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1795 case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1798 gen_helper_fneg_T(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1801 case 0xf05d: /* fabs FRn/DRn */
1803 if (ctx
->fpscr
& FPSCR_PR
) {
1804 if (ctx
->opcode
& 0x0100)
1805 break; /* illegal instruction */
1806 TCGv_i64 fp
= tcg_temp_new_i64();
1807 gen_load_fpr64(fp
, DREG(B11_8
));
1808 gen_helper_fabs_DT(fp
, fp
);
1809 gen_store_fpr64(fp
, DREG(B11_8
));
1810 tcg_temp_free_i64(fp
);
1812 gen_helper_fabs_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1815 case 0xf06d: /* fsqrt FRn */
1817 if (ctx
->fpscr
& FPSCR_PR
) {
1818 if (ctx
->opcode
& 0x0100)
1819 break; /* illegal instruction */
1820 TCGv_i64 fp
= tcg_temp_new_i64();
1821 gen_load_fpr64(fp
, DREG(B11_8
));
1822 gen_helper_fsqrt_DT(fp
, fp
);
1823 gen_store_fpr64(fp
, DREG(B11_8
));
1824 tcg_temp_free_i64(fp
);
1826 gen_helper_fsqrt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1829 case 0xf07d: /* fsrra FRn */
1832 case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1834 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1835 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0);
1838 case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1840 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1841 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0x3f800000);
1844 case 0xf0ad: /* fcnvsd FPUL,DRn */
1847 TCGv_i64 fp
= tcg_temp_new_i64();
1848 gen_helper_fcnvsd_FT_DT(fp
, cpu_fpul
);
1849 gen_store_fpr64(fp
, DREG(B11_8
));
1850 tcg_temp_free_i64(fp
);
1853 case 0xf0bd: /* fcnvds DRn,FPUL */
1856 TCGv_i64 fp
= tcg_temp_new_i64();
1857 gen_load_fpr64(fp
, DREG(B11_8
));
1858 gen_helper_fcnvds_DT_FT(cpu_fpul
, fp
);
1859 tcg_temp_free_i64(fp
);
1862 case 0xf0ed: /* fipr FVm,FVn */
1864 if ((ctx
->fpscr
& FPSCR_PR
) == 0) {
1866 m
= tcg_const_i32((ctx
->opcode
>> 16) & 3);
1867 n
= tcg_const_i32((ctx
->opcode
>> 18) & 3);
1868 gen_helper_fipr(m
, n
);
1874 case 0xf0fd: /* ftrv XMTRX,FVn */
1876 if ((ctx
->opcode
& 0x0300) == 0x0100 &&
1877 (ctx
->fpscr
& FPSCR_PR
) == 0) {
1879 n
= tcg_const_i32((ctx
->opcode
>> 18) & 3);
1887 fprintf(stderr
, "unknown instruction 0x%04x at pc 0x%08x\n",
1888 ctx
->opcode
, ctx
->pc
);
1891 tcg_gen_movi_i32(cpu_pc
, ctx
->pc
);
1892 if (ctx
->flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
)) {
1893 gen_helper_raise_slot_illegal_instruction();
1895 gen_helper_raise_illegal_instruction();
1897 ctx
->bstate
= BS_EXCP
;
/*
 * decode_opc(): translate one guest instruction and handle SH4
 * delay-slot bookkeeping around it.
 * NOTE(review): this extraction has dropped original lines (braces,
 * the call that actually translates the instruction, and the
 * unconditional gen_jump() arm), so the comments below describe only
 * the fragments that are visible here.
 */
1900 static void decode_opc(DisasContext
* ctx
)
/* Snapshot the delay-slot flags in force BEFORE this instruction. */
1902 uint32_t old_flags
= ctx
->flags
;
/* If the instruction just translated sat in a delay slot, the pending
   (delayed) branch must be emitted now. */
1906 if (old_flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
)) {
/* DELAY_SLOT_CLEARME: flags need no masking — presumably cleared
   wholesale in the dropped branch; TODO confirm against full source. */
1907 if (ctx
->flags
& DELAY_SLOT_CLEARME
) {
1910 /* go out of the delay slot */
1911 uint32_t new_flags
= ctx
->flags
;
/* Drop only the delay-slot bits; keep any other pending flags. */
1912 new_flags
&= ~(DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
);
1913 gen_store_flags(new_flags
);
/* Emitting the delayed branch terminates this translation block. */
1916 ctx
->bstate
= BS_BRANCH
;
/* Conditional delay-slot branch: jump only if the condition held. */
1917 if (old_flags
& DELAY_SLOT_CONDITIONAL
) {
1918 gen_delayed_conditional_jump(ctx
);
/* Unconditional delay-slot branch (its jump call was lost in this
   extraction — NOTE(review): verify against the full file). */
1919 } else if (old_flags
& DELAY_SLOT
) {
1925 /* go into a delay slot */
/* If the instruction just translated set up a delay slot, persist the
   flags so the slot survives a TB boundary. */
1926 if (ctx
->flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
))
1927 gen_store_flags(ctx
->flags
);
/*
 * gen_intermediate_code_internal(): translate a run of guest SH4
 * instructions into TCG ops, stopping when the decoder reports a
 * branch/exception, at a page boundary, on single-step, or when the
 * instruction-count limit is reached.  The third argument (name not
 * visible in this extraction) presumably selects "search PC" mode,
 * in which per-op PC/flags tables are recorded — TODO confirm.
 * NOTE(review): many original lines were dropped from this extraction
 * (declarations of ctx/i/ii/num_insns/bp, gen_icount_start, the call
 * into the decoder, several conditionals); comments cover only what
 * is visible.
 */
1931 gen_intermediate_code_internal(CPUState
* env
, TranslationBlock
* tb
,
1935 target_ulong pc_start
;
1936 static uint16_t *gen_opc_end
;
1943 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
/* Seed the per-TB translation context from the TB and CPU state. */
1945 ctx
.flags
= (uint32_t)tb
->flags
;
1946 ctx
.bstate
= BS_NONE
;
1948 ctx
.fpscr
= env
->fpscr
;
/* memidx 1 when SR.MD is clear (user mode), 0 otherwise — matches the
   IS_USER() definition at the top of the file. */
1949 ctx
.memidx
= (env
->sr
& SR_MD
) == 0 ? 1 : 0;
1950 /* We don't know if the delayed pc came from a dynamic or static branch,
1951 so assume it is a dynamic branch. */
1952 ctx
.delayed_pc
= -1; /* use delayed pc from env pointer */
1954 ctx
.singlestep_enabled
= env
->singlestep_enabled
;
1955 ctx
.features
= env
->features
;
1956 ctx
.has_movcal
= (tb
->flags
& TB_FLAG_PENDING_MOVCA
);
/* Per-TB instruction budget from cflags; fall back to the maximum. */
1960 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
1962 max_insns
= CF_COUNT_MASK
;
/* Main translation loop: one guest instruction per iteration, until
   the decoder ends the block or the opcode buffer fills. */
1964 while (ctx
.bstate
== BS_NONE
&& gen_opc_ptr
< gen_opc_end
) {
/* Stop translation at a debugger breakpoint on the current PC. */
1965 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
1966 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
1967 if (ctx
.pc
== bp
->pc
) {
1968 /* We have hit a breakpoint - make sure PC is up-to-date */
1969 tcg_gen_movi_i32(cpu_pc
, ctx
.pc
);
1971 ctx
.bstate
= BS_EXCP
;
/* Search-PC bookkeeping: record pc/flags/icount for each generated
   op so gen_pc_load() can map host ops back to guest state.
   NOTE(review): the guarding conditional was dropped by extraction. */
1977 i
= gen_opc_ptr
- gen_opc_buf
;
1981 gen_opc_instr_start
[ii
++] = 0;
1983 gen_opc_pc
[ii
] = ctx
.pc
;
1984 gen_opc_hflags
[ii
] = ctx
.flags
;
1985 gen_opc_instr_start
[ii
] = 1;
1986 gen_opc_icount
[ii
] = num_insns
;
/* Last instruction of an I/O-bounded TB — the action line (presumably
   gen_io_start()) was dropped by this extraction. */
1988 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
/* Debug trace of each opcode fetch (guarded by a dropped conditional). */
1991 fprintf(stderr
, "Loading opcode at address 0x%08x\n", ctx
.pc
);
/* Fetch the 16-bit opcode; the decode call itself was dropped here. */
1994 ctx
.opcode
= lduw_code(ctx
.pc
);
/* Loop-exit conditions: page boundary, single-step, insn budget. */
1998 if ((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
2000 if (env
->singlestep_enabled
)
2002 if (num_insns
>= max_insns
)
2007 if (tb
->cflags
& CF_LAST_IO
)
/* Single-step epilogue: flush the final PC before raising the debug
   exception (raise call dropped by extraction). */
2009 if (env
->singlestep_enabled
) {
2010 tcg_gen_movi_i32(cpu_pc
, ctx
.pc
);
/* Normal epilogue: close the TB according to how translation ended. */
2013 switch (ctx
.bstate
) {
2015 /* gen_op_interrupt_restart(); */
2019 gen_store_flags(ctx
.flags
| DELAY_SLOT_CLEARME
);
2021 gen_goto_tb(&ctx
, 0, ctx
.pc
);
2024 /* gen_op_interrupt_restart(); */
2033 gen_icount_end(tb
, num_insns
);
2034 *gen_opc_ptr
= INDEX_op_end
;
/* Search-PC mode: pad the instr_start table out to the op count. */
2036 i
= gen_opc_ptr
- gen_opc_buf
;
2039 gen_opc_instr_start
[ii
++] = 0;
/* Record final TB metadata: byte size and instruction count. */
2041 tb
->size
= ctx
.pc
- pc_start
;
2042 tb
->icount
= num_insns
;
/* Optional disassembly logging of the guest code just translated. */
2046 #ifdef SH4_DEBUG_DISAS
2047 qemu_log_mask(CPU_LOG_TB_IN_ASM
, "\n");
2049 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
2050 qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
2051 log_target_disas(pc_start
, ctx
.pc
- pc_start
, 0);
/* Public entry point: translate a TB without per-op PC recording
   (third argument 0 = normal mode). */
2057 void gen_intermediate_code(CPUState
* env
, struct TranslationBlock
*tb
)
2059 gen_intermediate_code_internal(env
, tb
, 0);
/* Public entry point: re-translate a TB in "search PC" mode
   (third argument 1), recording per-op guest PC/flags tables so a
   faulting host PC can be mapped back to guest state. */
2062 void gen_intermediate_code_pc(CPUState
* env
, struct TranslationBlock
*tb
)
2064 gen_intermediate_code_internal(env
, tb
, 1);
/*
 * gen_pc_load(): restore guest PC and translator flags from the
 * per-op tables recorded during search-PC translation.  pc_pos is the
 * index of the generated op that corresponds to searched_pc; tb and
 * puc are unused in the visible body.
 */
2067 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
2068 unsigned long searched_pc
, int pc_pos
, void *puc
)
2070 env
->pc
= gen_opc_pc
[pc_pos
];
2071 env
->flags
= gen_opc_hflags
[pc_pos
];