/*
 *  Copyright (c) 2005 Samuel Tardieu
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
27 #define SH4_DEBUG_DISAS
28 //#define SH4_SINGLE_STEP
34 #include "qemu-common.h"
40 typedef struct DisasContext
{
41 struct TranslationBlock
*tb
;
50 int singlestep_enabled
;
55 #if defined(CONFIG_USER_ONLY)
56 #define IS_USER(ctx) 1
58 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
/* Translation-block exit status, stored in ctx->bstate. */
enum {
    BS_NONE = 0,   /* We go out of the TB without reaching a branch or an
                      exception condition */
    BS_STOP = 1,   /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP = 3,   /* We reached an exception condition */
};
70 /* global register indexes */
71 static TCGv_ptr cpu_env
;
72 static TCGv cpu_gregs
[24];
73 static TCGv cpu_pc
, cpu_sr
, cpu_ssr
, cpu_spc
, cpu_gbr
;
74 static TCGv cpu_vbr
, cpu_sgr
, cpu_dbr
, cpu_mach
, cpu_macl
;
75 static TCGv cpu_pr
, cpu_fpscr
, cpu_fpul
, cpu_ldst
;
76 static TCGv cpu_fregs
[32];
78 /* internal register indexes */
79 static TCGv cpu_flags
, cpu_delayed_pc
;
81 #include "gen-icount.h"
83 static void sh4_translate_init(void)
86 static int done_init
= 0;
87 static const char * const gregnames
[24] = {
88 "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
89 "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
90 "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
91 "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
92 "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
94 static const char * const fregnames
[32] = {
95 "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
96 "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
97 "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
98 "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
99 "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
100 "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
101 "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
102 "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
108 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
110 for (i
= 0; i
< 24; i
++)
111 cpu_gregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
112 offsetof(CPUState
, gregs
[i
]),
115 cpu_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
116 offsetof(CPUState
, pc
), "PC");
117 cpu_sr
= tcg_global_mem_new_i32(TCG_AREG0
,
118 offsetof(CPUState
, sr
), "SR");
119 cpu_ssr
= tcg_global_mem_new_i32(TCG_AREG0
,
120 offsetof(CPUState
, ssr
), "SSR");
121 cpu_spc
= tcg_global_mem_new_i32(TCG_AREG0
,
122 offsetof(CPUState
, spc
), "SPC");
123 cpu_gbr
= tcg_global_mem_new_i32(TCG_AREG0
,
124 offsetof(CPUState
, gbr
), "GBR");
125 cpu_vbr
= tcg_global_mem_new_i32(TCG_AREG0
,
126 offsetof(CPUState
, vbr
), "VBR");
127 cpu_sgr
= tcg_global_mem_new_i32(TCG_AREG0
,
128 offsetof(CPUState
, sgr
), "SGR");
129 cpu_dbr
= tcg_global_mem_new_i32(TCG_AREG0
,
130 offsetof(CPUState
, dbr
), "DBR");
131 cpu_mach
= tcg_global_mem_new_i32(TCG_AREG0
,
132 offsetof(CPUState
, mach
), "MACH");
133 cpu_macl
= tcg_global_mem_new_i32(TCG_AREG0
,
134 offsetof(CPUState
, macl
), "MACL");
135 cpu_pr
= tcg_global_mem_new_i32(TCG_AREG0
,
136 offsetof(CPUState
, pr
), "PR");
137 cpu_fpscr
= tcg_global_mem_new_i32(TCG_AREG0
,
138 offsetof(CPUState
, fpscr
), "FPSCR");
139 cpu_fpul
= tcg_global_mem_new_i32(TCG_AREG0
,
140 offsetof(CPUState
, fpul
), "FPUL");
142 cpu_flags
= tcg_global_mem_new_i32(TCG_AREG0
,
143 offsetof(CPUState
, flags
), "_flags_");
144 cpu_delayed_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
145 offsetof(CPUState
, delayed_pc
),
147 cpu_ldst
= tcg_global_mem_new_i32(TCG_AREG0
,
148 offsetof(CPUState
, ldst
), "_ldst_");
150 for (i
= 0; i
< 32; i
++)
151 cpu_fregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
152 offsetof(CPUState
, fregs
[i
]),
155 /* register helpers */
162 void cpu_dump_state(CPUState
* env
, FILE * f
,
163 int (*cpu_fprintf
) (FILE * f
, const char *fmt
, ...),
167 cpu_fprintf(f
, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
168 env
->pc
, env
->sr
, env
->pr
, env
->fpscr
);
169 cpu_fprintf(f
, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
170 env
->spc
, env
->ssr
, env
->gbr
, env
->vbr
);
171 cpu_fprintf(f
, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
172 env
->sgr
, env
->dbr
, env
->delayed_pc
, env
->fpul
);
173 for (i
= 0; i
< 24; i
+= 4) {
174 cpu_fprintf(f
, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
175 i
, env
->gregs
[i
], i
+ 1, env
->gregs
[i
+ 1],
176 i
+ 2, env
->gregs
[i
+ 2], i
+ 3, env
->gregs
[i
+ 3]);
178 if (env
->flags
& DELAY_SLOT
) {
179 cpu_fprintf(f
, "in delay slot (delayed_pc=0x%08x)\n",
181 } else if (env
->flags
& DELAY_SLOT_CONDITIONAL
) {
182 cpu_fprintf(f
, "in conditional delay slot (delayed_pc=0x%08x)\n",
187 static void cpu_sh4_reset(CPUSH4State
* env
)
189 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
190 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
191 log_cpu_state(env
, 0);
194 #if defined(CONFIG_USER_ONLY)
197 env
->sr
= SR_MD
| SR_RB
| SR_BL
| SR_I3
| SR_I2
| SR_I1
| SR_I0
;
200 env
->pc
= 0xA0000000;
201 #if defined(CONFIG_USER_ONLY)
202 env
->fpscr
= FPSCR_PR
; /* value for userspace according to the kernel */
203 set_float_rounding_mode(float_round_nearest_even
, &env
->fp_status
); /* ?! */
205 env
->fpscr
= 0x00040001; /* CPU reset value according to SH4 manual */
206 set_float_rounding_mode(float_round_to_zero
, &env
->fp_status
);
220 static sh4_def_t sh4_defs
[] = {
223 .id
= SH_CPU_SH7750R
,
227 .features
= SH_FEATURE_BCR3_AND_BCR4
,
230 .id
= SH_CPU_SH7751R
,
233 .cvr
= 0x00110000, /* Neutered caches, should be 0x20480000 */
234 .features
= SH_FEATURE_BCR3_AND_BCR4
,
241 .features
= SH_FEATURE_SH4A
,
245 static const sh4_def_t
*cpu_sh4_find_by_name(const char *name
)
249 if (strcasecmp(name
, "any") == 0)
252 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
253 if (strcasecmp(name
, sh4_defs
[i
].name
) == 0)
259 void sh4_cpu_list(FILE *f
, int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...))
263 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
264 (*cpu_fprintf
)(f
, "%s\n", sh4_defs
[i
].name
);
267 static void cpu_sh4_register(CPUSH4State
*env
, const sh4_def_t
*def
)
275 CPUSH4State
*cpu_sh4_init(const char *cpu_model
)
278 const sh4_def_t
*def
;
280 def
= cpu_sh4_find_by_name(cpu_model
);
283 env
= qemu_mallocz(sizeof(CPUSH4State
));
284 env
->features
= def
->features
;
286 env
->movcal_backup_tail
= &(env
->movcal_backup
);
287 sh4_translate_init();
288 env
->cpu_model_str
= cpu_model
;
290 cpu_sh4_register(env
, def
);
296 static void gen_goto_tb(DisasContext
* ctx
, int n
, target_ulong dest
)
298 TranslationBlock
*tb
;
301 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
302 !ctx
->singlestep_enabled
) {
303 /* Use a direct jump if in same page and singlestep not enabled */
305 tcg_gen_movi_i32(cpu_pc
, dest
);
306 tcg_gen_exit_tb((long) tb
+ n
);
308 tcg_gen_movi_i32(cpu_pc
, dest
);
309 if (ctx
->singlestep_enabled
)
315 static void gen_jump(DisasContext
* ctx
)
317 if (ctx
->delayed_pc
== (uint32_t) - 1) {
318 /* Target is not statically known, it comes necessarily from a
319 delayed jump as immediate jump are conditinal jumps */
320 tcg_gen_mov_i32(cpu_pc
, cpu_delayed_pc
);
321 if (ctx
->singlestep_enabled
)
325 gen_goto_tb(ctx
, 0, ctx
->delayed_pc
);
329 static inline void gen_branch_slot(uint32_t delayed_pc
, int t
)
332 int label
= gen_new_label();
333 tcg_gen_movi_i32(cpu_delayed_pc
, delayed_pc
);
335 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
336 tcg_gen_brcondi_i32(TCG_COND_NE
, sr
, t
? SR_T
: 0, label
);
337 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
338 gen_set_label(label
);
341 /* Immediate conditional jump (bt or bf) */
342 static void gen_conditional_jump(DisasContext
* ctx
,
343 target_ulong ift
, target_ulong ifnott
)
348 l1
= gen_new_label();
350 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
351 tcg_gen_brcondi_i32(TCG_COND_EQ
, sr
, SR_T
, l1
);
352 gen_goto_tb(ctx
, 0, ifnott
);
354 gen_goto_tb(ctx
, 1, ift
);
357 /* Delayed conditional jump (bt or bf) */
358 static void gen_delayed_conditional_jump(DisasContext
* ctx
)
363 l1
= gen_new_label();
365 tcg_gen_andi_i32(ds
, cpu_flags
, DELAY_SLOT_TRUE
);
366 tcg_gen_brcondi_i32(TCG_COND_EQ
, ds
, DELAY_SLOT_TRUE
, l1
);
367 gen_goto_tb(ctx
, 1, ctx
->pc
+ 2);
369 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, ~DELAY_SLOT_TRUE
);
373 static inline void gen_set_t(void)
375 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
378 static inline void gen_clr_t(void)
380 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
383 static inline void gen_cmp(int cond
, TCGv t0
, TCGv t1
)
385 int label1
= gen_new_label();
386 int label2
= gen_new_label();
387 tcg_gen_brcond_i32(cond
, t1
, t0
, label1
);
390 gen_set_label(label1
);
392 gen_set_label(label2
);
395 static inline void gen_cmp_imm(int cond
, TCGv t0
, int32_t imm
)
397 int label1
= gen_new_label();
398 int label2
= gen_new_label();
399 tcg_gen_brcondi_i32(cond
, t0
, imm
, label1
);
402 gen_set_label(label1
);
404 gen_set_label(label2
);
407 static inline void gen_store_flags(uint32_t flags
)
409 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
410 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, flags
);
413 static inline void gen_copy_bit_i32(TCGv t0
, int p0
, TCGv t1
, int p1
)
415 TCGv tmp
= tcg_temp_new();
420 tcg_gen_andi_i32(tmp
, t1
, (1 << p1
));
421 tcg_gen_andi_i32(t0
, t0
, ~(1 << p0
));
423 tcg_gen_shri_i32(tmp
, tmp
, p1
- p0
);
425 tcg_gen_shli_i32(tmp
, tmp
, p0
- p1
);
426 tcg_gen_or_i32(t0
, t0
, tmp
);
431 static inline void gen_load_fpr64(TCGv_i64 t
, int reg
)
433 tcg_gen_concat_i32_i64(t
, cpu_fregs
[reg
+ 1], cpu_fregs
[reg
]);
436 static inline void gen_store_fpr64 (TCGv_i64 t
, int reg
)
438 TCGv_i32 tmp
= tcg_temp_new_i32();
439 tcg_gen_trunc_i64_i32(tmp
, t
);
440 tcg_gen_mov_i32(cpu_fregs
[reg
+ 1], tmp
);
441 tcg_gen_shri_i64(t
, t
, 32);
442 tcg_gen_trunc_i64_i32(tmp
, t
);
443 tcg_gen_mov_i32(cpu_fregs
[reg
], tmp
);
444 tcg_temp_free_i32(tmp
);
/* Opcode field extractors: Bx_y selects bits x..y of ctx->opcode;
 * a trailing 's' means the field is sign-extended. */
#define B3_0 (ctx->opcode & 0xf)
#define B6_4 ((ctx->opcode >> 4) & 0x7)
#define B7_4 ((ctx->opcode >> 4) & 0xf)
#define B7_0 (ctx->opcode & 0xff)
#define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
#define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
  (ctx->opcode & 0xfff))
#define B11_8 ((ctx->opcode >> 8) & 0xf)
#define B15_12 ((ctx->opcode >> 12) & 0xf)

/* REG picks the active bank for R0-R7 (bank 1 iff MD and RB both set);
 * ALTREG picks the inactive bank. */
#define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
                (cpu_gregs[x + 16]) : (cpu_gregs[x]))

#define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
                ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))

/* FP register selection honoring the FPSCR.FR bank bit; XHACK maps the
 * XD register numbering onto the flat cpu_fregs index space. */
#define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
#define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
#define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
#define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
/* Raise a slot-illegal-instruction exception and abort decoding when the
 * current instruction sits in a delay slot. */
#define CHECK_NOT_DELAY_SLOT \
  if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
  {                                                           \
      tcg_gen_movi_i32(cpu_pc, ctx->pc-2);                    \
      gen_helper_raise_slot_illegal_instruction();            \
      ctx->bstate = BS_EXCP;                                  \
      return;                                                 \
  }

/* Raise an illegal-instruction exception and abort decoding when
 * executing in user mode. */
#define CHECK_PRIVILEGED                                      \
  if (IS_USER(ctx)) {                                         \
      tcg_gen_movi_i32(cpu_pc, ctx->pc);                      \
      gen_helper_raise_illegal_instruction();                 \
      ctx->bstate = BS_EXCP;                                  \
      return;                                                 \
  }

/* Raise the appropriate FPU-disable exception and abort decoding when
 * SR.FD is set (slot variant inside a delay slot). */
#define CHECK_FPU_ENABLED                                     \
  if (ctx->flags & SR_FD) {                                   \
      if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
          tcg_gen_movi_i32(cpu_pc, ctx->pc-2);                \
          gen_helper_raise_slot_fpu_disable();                \
      } else {                                                \
          tcg_gen_movi_i32(cpu_pc, ctx->pc);                  \
          gen_helper_raise_fpu_disable();                     \
      }                                                       \
      ctx->bstate = BS_EXCP;                                  \
      return;                                                 \
  }
498 static void _decode_opc(DisasContext
* ctx
)
500 /* This code tries to make movcal emulation sufficiently
501 accurate for Linux purposes. This instruction writes
502 memory, and prior to that, always allocates a cache line.
503 It is used in two contexts:
504 - in memcpy, where data is copied in blocks, the first write
505 of to a block uses movca.l for performance.
506 - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
507 to flush the cache. Here, the data written by movcal.l is never
508 written to memory, and the data written is just bogus.
510 To simulate this, we simulate movcal.l, we store the value to memory,
511 but we also remember the previous content. If we see ocbi, we check
512 if movcal.l for that address was done previously. If so, the write should
513 not have hit the memory, so we restore the previous content.
514 When we see an instruction that is neither movca.l
515 nor ocbi, the previous content is discarded.
517 To optimize, we only try to flush stores when we're at the start of
518 TB, or if we already saw movca.l in this TB and did not flush stores
522 int opcode
= ctx
->opcode
& 0xf0ff;
523 if (opcode
!= 0x0093 /* ocbi */
524 && opcode
!= 0x00c3 /* movca.l */)
526 gen_helper_discard_movcal_backup ();
532 fprintf(stderr
, "Translating opcode 0x%04x\n", ctx
->opcode
);
535 switch (ctx
->opcode
) {
536 case 0x0019: /* div0u */
537 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~(SR_M
| SR_Q
| SR_T
));
539 case 0x000b: /* rts */
541 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_pr
);
542 ctx
->flags
|= DELAY_SLOT
;
543 ctx
->delayed_pc
= (uint32_t) - 1;
545 case 0x0028: /* clrmac */
546 tcg_gen_movi_i32(cpu_mach
, 0);
547 tcg_gen_movi_i32(cpu_macl
, 0);
549 case 0x0048: /* clrs */
550 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_S
);
552 case 0x0008: /* clrt */
555 case 0x0038: /* ldtlb */
559 case 0x002b: /* rte */
562 tcg_gen_mov_i32(cpu_sr
, cpu_ssr
);
563 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_spc
);
564 ctx
->flags
|= DELAY_SLOT
;
565 ctx
->delayed_pc
= (uint32_t) - 1;
567 case 0x0058: /* sets */
568 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_S
);
570 case 0x0018: /* sett */
573 case 0xfbfd: /* frchg */
574 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_FR
);
575 ctx
->bstate
= BS_STOP
;
577 case 0xf3fd: /* fschg */
578 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_SZ
);
579 ctx
->bstate
= BS_STOP
;
581 case 0x0009: /* nop */
583 case 0x001b: /* sleep */
585 gen_helper_sleep(tcg_const_i32(ctx
->pc
+ 2));
589 switch (ctx
->opcode
& 0xf000) {
590 case 0x1000: /* mov.l Rm,@(disp,Rn) */
592 TCGv addr
= tcg_temp_new();
593 tcg_gen_addi_i32(addr
, REG(B11_8
), B3_0
* 4);
594 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
598 case 0x5000: /* mov.l @(disp,Rm),Rn */
600 TCGv addr
= tcg_temp_new();
601 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 4);
602 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
606 case 0xe000: /* mov #imm,Rn */
607 tcg_gen_movi_i32(REG(B11_8
), B7_0s
);
609 case 0x9000: /* mov.w @(disp,PC),Rn */
611 TCGv addr
= tcg_const_i32(ctx
->pc
+ 4 + B7_0
* 2);
612 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
616 case 0xd000: /* mov.l @(disp,PC),Rn */
618 TCGv addr
= tcg_const_i32((ctx
->pc
+ 4 + B7_0
* 4) & ~3);
619 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
623 case 0x7000: /* add #imm,Rn */
624 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), B7_0s
);
626 case 0xa000: /* bra disp */
628 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
629 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
630 ctx
->flags
|= DELAY_SLOT
;
632 case 0xb000: /* bsr disp */
634 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
635 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
636 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
637 ctx
->flags
|= DELAY_SLOT
;
641 switch (ctx
->opcode
& 0xf00f) {
642 case 0x6003: /* mov Rm,Rn */
643 tcg_gen_mov_i32(REG(B11_8
), REG(B7_4
));
645 case 0x2000: /* mov.b Rm,@Rn */
646 tcg_gen_qemu_st8(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
648 case 0x2001: /* mov.w Rm,@Rn */
649 tcg_gen_qemu_st16(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
651 case 0x2002: /* mov.l Rm,@Rn */
652 tcg_gen_qemu_st32(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
654 case 0x6000: /* mov.b @Rm,Rn */
655 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
657 case 0x6001: /* mov.w @Rm,Rn */
658 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
660 case 0x6002: /* mov.l @Rm,Rn */
661 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
663 case 0x2004: /* mov.b Rm,@-Rn */
665 TCGv addr
= tcg_temp_new();
666 tcg_gen_subi_i32(addr
, REG(B11_8
), 1);
667 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
); /* might cause re-execution */
668 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 1); /* modify register status */
672 case 0x2005: /* mov.w Rm,@-Rn */
674 TCGv addr
= tcg_temp_new();
675 tcg_gen_subi_i32(addr
, REG(B11_8
), 2);
676 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
677 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 2);
681 case 0x2006: /* mov.l Rm,@-Rn */
683 TCGv addr
= tcg_temp_new();
684 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
685 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
686 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 4);
689 case 0x6004: /* mov.b @Rm+,Rn */
690 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
692 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 1);
694 case 0x6005: /* mov.w @Rm+,Rn */
695 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
697 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
699 case 0x6006: /* mov.l @Rm+,Rn */
700 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
702 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
704 case 0x0004: /* mov.b Rm,@(R0,Rn) */
706 TCGv addr
= tcg_temp_new();
707 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
708 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
);
712 case 0x0005: /* mov.w Rm,@(R0,Rn) */
714 TCGv addr
= tcg_temp_new();
715 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
716 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
720 case 0x0006: /* mov.l Rm,@(R0,Rn) */
722 TCGv addr
= tcg_temp_new();
723 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
724 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
728 case 0x000c: /* mov.b @(R0,Rm),Rn */
730 TCGv addr
= tcg_temp_new();
731 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
732 tcg_gen_qemu_ld8s(REG(B11_8
), addr
, ctx
->memidx
);
736 case 0x000d: /* mov.w @(R0,Rm),Rn */
738 TCGv addr
= tcg_temp_new();
739 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
740 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
744 case 0x000e: /* mov.l @(R0,Rm),Rn */
746 TCGv addr
= tcg_temp_new();
747 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
748 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
752 case 0x6008: /* swap.b Rm,Rn */
754 TCGv highw
, high
, low
;
755 highw
= tcg_temp_new();
756 tcg_gen_andi_i32(highw
, REG(B7_4
), 0xffff0000);
757 high
= tcg_temp_new();
758 tcg_gen_ext8u_i32(high
, REG(B7_4
));
759 tcg_gen_shli_i32(high
, high
, 8);
760 low
= tcg_temp_new();
761 tcg_gen_shri_i32(low
, REG(B7_4
), 8);
762 tcg_gen_ext8u_i32(low
, low
);
763 tcg_gen_or_i32(REG(B11_8
), high
, low
);
764 tcg_gen_or_i32(REG(B11_8
), REG(B11_8
), highw
);
769 case 0x6009: /* swap.w Rm,Rn */
772 high
= tcg_temp_new();
773 tcg_gen_ext16u_i32(high
, REG(B7_4
));
774 tcg_gen_shli_i32(high
, high
, 16);
775 low
= tcg_temp_new();
776 tcg_gen_shri_i32(low
, REG(B7_4
), 16);
777 tcg_gen_ext16u_i32(low
, low
);
778 tcg_gen_or_i32(REG(B11_8
), high
, low
);
783 case 0x200d: /* xtrct Rm,Rn */
786 high
= tcg_temp_new();
787 tcg_gen_ext16u_i32(high
, REG(B7_4
));
788 tcg_gen_shli_i32(high
, high
, 16);
789 low
= tcg_temp_new();
790 tcg_gen_shri_i32(low
, REG(B11_8
), 16);
791 tcg_gen_ext16u_i32(low
, low
);
792 tcg_gen_or_i32(REG(B11_8
), high
, low
);
797 case 0x300c: /* add Rm,Rn */
798 tcg_gen_add_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
800 case 0x300e: /* addc Rm,Rn */
801 gen_helper_addc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
803 case 0x300f: /* addv Rm,Rn */
804 gen_helper_addv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
806 case 0x2009: /* and Rm,Rn */
807 tcg_gen_and_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
809 case 0x3000: /* cmp/eq Rm,Rn */
810 gen_cmp(TCG_COND_EQ
, REG(B7_4
), REG(B11_8
));
812 case 0x3003: /* cmp/ge Rm,Rn */
813 gen_cmp(TCG_COND_GE
, REG(B7_4
), REG(B11_8
));
815 case 0x3007: /* cmp/gt Rm,Rn */
816 gen_cmp(TCG_COND_GT
, REG(B7_4
), REG(B11_8
));
818 case 0x3006: /* cmp/hi Rm,Rn */
819 gen_cmp(TCG_COND_GTU
, REG(B7_4
), REG(B11_8
));
821 case 0x3002: /* cmp/hs Rm,Rn */
822 gen_cmp(TCG_COND_GEU
, REG(B7_4
), REG(B11_8
));
824 case 0x200c: /* cmp/str Rm,Rn */
826 int label1
= gen_new_label();
827 int label2
= gen_new_label();
828 TCGv cmp1
= tcg_temp_local_new();
829 TCGv cmp2
= tcg_temp_local_new();
830 tcg_gen_xor_i32(cmp1
, REG(B7_4
), REG(B11_8
));
831 tcg_gen_andi_i32(cmp2
, cmp1
, 0xff000000);
832 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
833 tcg_gen_andi_i32(cmp2
, cmp1
, 0x00ff0000);
834 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
835 tcg_gen_andi_i32(cmp2
, cmp1
, 0x0000ff00);
836 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
837 tcg_gen_andi_i32(cmp2
, cmp1
, 0x000000ff);
838 tcg_gen_brcondi_i32(TCG_COND_EQ
, cmp2
, 0, label1
);
839 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
841 gen_set_label(label1
);
842 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
843 gen_set_label(label2
);
848 case 0x2007: /* div0s Rm,Rn */
850 gen_copy_bit_i32(cpu_sr
, 8, REG(B11_8
), 31); /* SR_Q */
851 gen_copy_bit_i32(cpu_sr
, 9, REG(B7_4
), 31); /* SR_M */
852 TCGv val
= tcg_temp_new();
853 tcg_gen_xor_i32(val
, REG(B7_4
), REG(B11_8
));
854 gen_copy_bit_i32(cpu_sr
, 0, val
, 31); /* SR_T */
858 case 0x3004: /* div1 Rm,Rn */
859 gen_helper_div1(REG(B11_8
), REG(B7_4
), REG(B11_8
));
861 case 0x300d: /* dmuls.l Rm,Rn */
863 TCGv_i64 tmp1
= tcg_temp_new_i64();
864 TCGv_i64 tmp2
= tcg_temp_new_i64();
866 tcg_gen_ext_i32_i64(tmp1
, REG(B7_4
));
867 tcg_gen_ext_i32_i64(tmp2
, REG(B11_8
));
868 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
869 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
870 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
871 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
873 tcg_temp_free_i64(tmp2
);
874 tcg_temp_free_i64(tmp1
);
877 case 0x3005: /* dmulu.l Rm,Rn */
879 TCGv_i64 tmp1
= tcg_temp_new_i64();
880 TCGv_i64 tmp2
= tcg_temp_new_i64();
882 tcg_gen_extu_i32_i64(tmp1
, REG(B7_4
));
883 tcg_gen_extu_i32_i64(tmp2
, REG(B11_8
));
884 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
885 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
886 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
887 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
889 tcg_temp_free_i64(tmp2
);
890 tcg_temp_free_i64(tmp1
);
893 case 0x600e: /* exts.b Rm,Rn */
894 tcg_gen_ext8s_i32(REG(B11_8
), REG(B7_4
));
896 case 0x600f: /* exts.w Rm,Rn */
897 tcg_gen_ext16s_i32(REG(B11_8
), REG(B7_4
));
899 case 0x600c: /* extu.b Rm,Rn */
900 tcg_gen_ext8u_i32(REG(B11_8
), REG(B7_4
));
902 case 0x600d: /* extu.w Rm,Rn */
903 tcg_gen_ext16u_i32(REG(B11_8
), REG(B7_4
));
905 case 0x000f: /* mac.l @Rm+,@Rn+ */
908 arg0
= tcg_temp_new();
909 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
910 arg1
= tcg_temp_new();
911 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
912 gen_helper_macl(arg0
, arg1
);
915 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
916 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
919 case 0x400f: /* mac.w @Rm+,@Rn+ */
922 arg0
= tcg_temp_new();
923 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
924 arg1
= tcg_temp_new();
925 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
926 gen_helper_macw(arg0
, arg1
);
929 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 2);
930 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
933 case 0x0007: /* mul.l Rm,Rn */
934 tcg_gen_mul_i32(cpu_macl
, REG(B7_4
), REG(B11_8
));
936 case 0x200f: /* muls.w Rm,Rn */
939 arg0
= tcg_temp_new();
940 tcg_gen_ext16s_i32(arg0
, REG(B7_4
));
941 arg1
= tcg_temp_new();
942 tcg_gen_ext16s_i32(arg1
, REG(B11_8
));
943 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
948 case 0x200e: /* mulu.w Rm,Rn */
951 arg0
= tcg_temp_new();
952 tcg_gen_ext16u_i32(arg0
, REG(B7_4
));
953 arg1
= tcg_temp_new();
954 tcg_gen_ext16u_i32(arg1
, REG(B11_8
));
955 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
960 case 0x600b: /* neg Rm,Rn */
961 tcg_gen_neg_i32(REG(B11_8
), REG(B7_4
));
963 case 0x600a: /* negc Rm,Rn */
964 gen_helper_negc(REG(B11_8
), REG(B7_4
));
966 case 0x6007: /* not Rm,Rn */
967 tcg_gen_not_i32(REG(B11_8
), REG(B7_4
));
969 case 0x200b: /* or Rm,Rn */
970 tcg_gen_or_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
972 case 0x400c: /* shad Rm,Rn */
974 int label1
= gen_new_label();
975 int label2
= gen_new_label();
976 int label3
= gen_new_label();
977 int label4
= gen_new_label();
978 TCGv shift
= tcg_temp_local_new();
979 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
980 /* Rm positive, shift to the left */
981 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
982 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
984 /* Rm negative, shift to the right */
985 gen_set_label(label1
);
986 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
987 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
988 tcg_gen_not_i32(shift
, REG(B7_4
));
989 tcg_gen_andi_i32(shift
, shift
, 0x1f);
990 tcg_gen_addi_i32(shift
, shift
, 1);
991 tcg_gen_sar_i32(REG(B11_8
), REG(B11_8
), shift
);
994 gen_set_label(label2
);
995 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B11_8
), 0, label3
);
996 tcg_gen_movi_i32(REG(B11_8
), 0);
998 gen_set_label(label3
);
999 tcg_gen_movi_i32(REG(B11_8
), 0xffffffff);
1000 gen_set_label(label4
);
1001 tcg_temp_free(shift
);
1004 case 0x400d: /* shld Rm,Rn */
1006 int label1
= gen_new_label();
1007 int label2
= gen_new_label();
1008 int label3
= gen_new_label();
1009 TCGv shift
= tcg_temp_local_new();
1010 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
1011 /* Rm positive, shift to the left */
1012 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1013 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
1015 /* Rm negative, shift to the right */
1016 gen_set_label(label1
);
1017 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1018 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
1019 tcg_gen_not_i32(shift
, REG(B7_4
));
1020 tcg_gen_andi_i32(shift
, shift
, 0x1f);
1021 tcg_gen_addi_i32(shift
, shift
, 1);
1022 tcg_gen_shr_i32(REG(B11_8
), REG(B11_8
), shift
);
1025 gen_set_label(label2
);
1026 tcg_gen_movi_i32(REG(B11_8
), 0);
1027 gen_set_label(label3
);
1028 tcg_temp_free(shift
);
1031 case 0x3008: /* sub Rm,Rn */
1032 tcg_gen_sub_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1034 case 0x300a: /* subc Rm,Rn */
1035 gen_helper_subc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1037 case 0x300b: /* subv Rm,Rn */
1038 gen_helper_subv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1040 case 0x2008: /* tst Rm,Rn */
1042 TCGv val
= tcg_temp_new();
1043 tcg_gen_and_i32(val
, REG(B7_4
), REG(B11_8
));
1044 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1048 case 0x200a: /* xor Rm,Rn */
1049 tcg_gen_xor_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1051 case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1053 if (ctx
->fpscr
& FPSCR_SZ
) {
1054 TCGv_i64 fp
= tcg_temp_new_i64();
1055 gen_load_fpr64(fp
, XREG(B7_4
));
1056 gen_store_fpr64(fp
, XREG(B11_8
));
1057 tcg_temp_free_i64(fp
);
1059 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1062 case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1064 if (ctx
->fpscr
& FPSCR_SZ
) {
1065 TCGv addr_hi
= tcg_temp_new();
1066 int fr
= XREG(B7_4
);
1067 tcg_gen_addi_i32(addr_hi
, REG(B11_8
), 4);
1068 tcg_gen_qemu_st32(cpu_fregs
[fr
], REG(B11_8
), ctx
->memidx
);
1069 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1070 tcg_temp_free(addr_hi
);
1072 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], REG(B11_8
), ctx
->memidx
);
1075 case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1077 if (ctx
->fpscr
& FPSCR_SZ
) {
1078 TCGv addr_hi
= tcg_temp_new();
1079 int fr
= XREG(B11_8
);
1080 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1081 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1082 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1083 tcg_temp_free(addr_hi
);
1085 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1088 case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1090 if (ctx
->fpscr
& FPSCR_SZ
) {
1091 TCGv addr_hi
= tcg_temp_new();
1092 int fr
= XREG(B11_8
);
1093 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1094 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1095 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1096 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 8);
1097 tcg_temp_free(addr_hi
);
1099 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1100 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
1103 case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1105 if (ctx
->fpscr
& FPSCR_SZ
) {
1106 TCGv addr
= tcg_temp_new_i32();
1107 int fr
= XREG(B7_4
);
1108 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1109 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1110 tcg_gen_subi_i32(addr
, REG(B11_8
), 8);
1111 tcg_gen_qemu_st32(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1112 tcg_gen_mov_i32(REG(B11_8
), addr
);
1113 tcg_temp_free(addr
);
1116 addr
= tcg_temp_new_i32();
1117 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1118 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1119 tcg_temp_free(addr
);
1120 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 4);
1123 case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1126 TCGv addr
= tcg_temp_new_i32();
1127 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
1128 if (ctx
->fpscr
& FPSCR_SZ
) {
1129 int fr
= XREG(B11_8
);
1130 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1131 tcg_gen_addi_i32(addr
, addr
, 4);
1132 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1134 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], addr
, ctx
->memidx
);
1136 tcg_temp_free(addr
);
1139 case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
1142 TCGv addr
= tcg_temp_new();
1143 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
1144 if (ctx
->fpscr
& FPSCR_SZ
) {
1145 int fr
= XREG(B7_4
);
1146 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1147 tcg_gen_addi_i32(addr
, addr
, 4);
1148 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1150 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1152 tcg_temp_free(addr
);
1155 case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1156 case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1157 case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1158 case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1159 case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1160 case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1163 if (ctx
->fpscr
& FPSCR_PR
) {
1166 if (ctx
->opcode
& 0x0110)
1167 break; /* illegal instruction */
1168 fp0
= tcg_temp_new_i64();
1169 fp1
= tcg_temp_new_i64();
1170 gen_load_fpr64(fp0
, DREG(B11_8
));
1171 gen_load_fpr64(fp1
, DREG(B7_4
));
1172 switch (ctx
->opcode
& 0xf00f) {
1173 case 0xf000: /* fadd Rm,Rn */
1174 gen_helper_fadd_DT(fp0
, fp0
, fp1
);
1176 case 0xf001: /* fsub Rm,Rn */
1177 gen_helper_fsub_DT(fp0
, fp0
, fp1
);
1179 case 0xf002: /* fmul Rm,Rn */
1180 gen_helper_fmul_DT(fp0
, fp0
, fp1
);
1182 case 0xf003: /* fdiv Rm,Rn */
1183 gen_helper_fdiv_DT(fp0
, fp0
, fp1
);
1185 case 0xf004: /* fcmp/eq Rm,Rn */
1186 gen_helper_fcmp_eq_DT(fp0
, fp1
);
1188 case 0xf005: /* fcmp/gt Rm,Rn */
1189 gen_helper_fcmp_gt_DT(fp0
, fp1
);
1192 gen_store_fpr64(fp0
, DREG(B11_8
));
1193 tcg_temp_free_i64(fp0
);
1194 tcg_temp_free_i64(fp1
);
1196 switch (ctx
->opcode
& 0xf00f) {
1197 case 0xf000: /* fadd Rm,Rn */
1198 gen_helper_fadd_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1200 case 0xf001: /* fsub Rm,Rn */
1201 gen_helper_fsub_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1203 case 0xf002: /* fmul Rm,Rn */
1204 gen_helper_fmul_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1206 case 0xf003: /* fdiv Rm,Rn */
1207 gen_helper_fdiv_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1209 case 0xf004: /* fcmp/eq Rm,Rn */
1210 gen_helper_fcmp_eq_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1212 case 0xf005: /* fcmp/gt Rm,Rn */
1213 gen_helper_fcmp_gt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1219 case 0xf00e: /* fmac FR0,RM,Rn */
1222 if (ctx
->fpscr
& FPSCR_PR
) {
1223 break; /* illegal instruction */
1225 gen_helper_fmac_FT(cpu_fregs
[FREG(B11_8
)],
1226 cpu_fregs
[FREG(0)], cpu_fregs
[FREG(B7_4
)], cpu_fregs
[FREG(B11_8
)]);
1232 switch (ctx
->opcode
& 0xff00) {
1233 case 0xc900: /* and #imm,R0 */
1234 tcg_gen_andi_i32(REG(0), REG(0), B7_0
);
1236 case 0xcd00: /* and.b #imm,@(R0,GBR) */
1239 addr
= tcg_temp_new();
1240 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1241 val
= tcg_temp_new();
1242 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1243 tcg_gen_andi_i32(val
, val
, B7_0
);
1244 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1246 tcg_temp_free(addr
);
1249 case 0x8b00: /* bf label */
1250 CHECK_NOT_DELAY_SLOT
1251 gen_conditional_jump(ctx
, ctx
->pc
+ 2,
1252 ctx
->pc
+ 4 + B7_0s
* 2);
1253 ctx
->bstate
= BS_BRANCH
;
1255 case 0x8f00: /* bf/s label */
1256 CHECK_NOT_DELAY_SLOT
1257 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 0);
1258 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1260 case 0x8900: /* bt label */
1261 CHECK_NOT_DELAY_SLOT
1262 gen_conditional_jump(ctx
, ctx
->pc
+ 4 + B7_0s
* 2,
1264 ctx
->bstate
= BS_BRANCH
;
1266 case 0x8d00: /* bt/s label */
1267 CHECK_NOT_DELAY_SLOT
1268 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 1);
1269 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1271 case 0x8800: /* cmp/eq #imm,R0 */
1272 gen_cmp_imm(TCG_COND_EQ
, REG(0), B7_0s
);
1274 case 0xc400: /* mov.b @(disp,GBR),R0 */
1276 TCGv addr
= tcg_temp_new();
1277 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1278 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1279 tcg_temp_free(addr
);
1282 case 0xc500: /* mov.w @(disp,GBR),R0 */
1284 TCGv addr
= tcg_temp_new();
1285 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1286 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1287 tcg_temp_free(addr
);
1290 case 0xc600: /* mov.l @(disp,GBR),R0 */
1292 TCGv addr
= tcg_temp_new();
1293 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1294 tcg_gen_qemu_ld32s(REG(0), addr
, ctx
->memidx
);
1295 tcg_temp_free(addr
);
1298 case 0xc000: /* mov.b R0,@(disp,GBR) */
1300 TCGv addr
= tcg_temp_new();
1301 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1302 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1303 tcg_temp_free(addr
);
1306 case 0xc100: /* mov.w R0,@(disp,GBR) */
1308 TCGv addr
= tcg_temp_new();
1309 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1310 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1311 tcg_temp_free(addr
);
1314 case 0xc200: /* mov.l R0,@(disp,GBR) */
1316 TCGv addr
= tcg_temp_new();
1317 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1318 tcg_gen_qemu_st32(REG(0), addr
, ctx
->memidx
);
1319 tcg_temp_free(addr
);
1322 case 0x8000: /* mov.b R0,@(disp,Rn) */
1324 TCGv addr
= tcg_temp_new();
1325 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1326 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1327 tcg_temp_free(addr
);
1330 case 0x8100: /* mov.w R0,@(disp,Rn) */
1332 TCGv addr
= tcg_temp_new();
1333 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1334 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1335 tcg_temp_free(addr
);
1338 case 0x8400: /* mov.b @(disp,Rn),R0 */
1340 TCGv addr
= tcg_temp_new();
1341 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1342 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1343 tcg_temp_free(addr
);
1346 case 0x8500: /* mov.w @(disp,Rn),R0 */
1348 TCGv addr
= tcg_temp_new();
1349 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1350 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1351 tcg_temp_free(addr
);
1354 case 0xc700: /* mova @(disp,PC),R0 */
1355 tcg_gen_movi_i32(REG(0), ((ctx
->pc
& 0xfffffffc) + 4 + B7_0
* 4) & ~3);
1357 case 0xcb00: /* or #imm,R0 */
1358 tcg_gen_ori_i32(REG(0), REG(0), B7_0
);
1360 case 0xcf00: /* or.b #imm,@(R0,GBR) */
1363 addr
= tcg_temp_new();
1364 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1365 val
= tcg_temp_new();
1366 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1367 tcg_gen_ori_i32(val
, val
, B7_0
);
1368 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1370 tcg_temp_free(addr
);
1373 case 0xc300: /* trapa #imm */
1376 CHECK_NOT_DELAY_SLOT
1377 tcg_gen_movi_i32(cpu_pc
, ctx
->pc
);
1378 imm
= tcg_const_i32(B7_0
);
1379 gen_helper_trapa(imm
);
1381 ctx
->bstate
= BS_BRANCH
;
1384 case 0xc800: /* tst #imm,R0 */
1386 TCGv val
= tcg_temp_new();
1387 tcg_gen_andi_i32(val
, REG(0), B7_0
);
1388 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1392 case 0xcc00: /* tst.b #imm,@(R0,GBR) */
1394 TCGv val
= tcg_temp_new();
1395 tcg_gen_add_i32(val
, REG(0), cpu_gbr
);
1396 tcg_gen_qemu_ld8u(val
, val
, ctx
->memidx
);
1397 tcg_gen_andi_i32(val
, val
, B7_0
);
1398 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1402 case 0xca00: /* xor #imm,R0 */
1403 tcg_gen_xori_i32(REG(0), REG(0), B7_0
);
1405 case 0xce00: /* xor.b #imm,@(R0,GBR) */
1408 addr
= tcg_temp_new();
1409 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1410 val
= tcg_temp_new();
1411 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1412 tcg_gen_xori_i32(val
, val
, B7_0
);
1413 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1415 tcg_temp_free(addr
);
1420 switch (ctx
->opcode
& 0xf08f) {
1421 case 0x408e: /* ldc Rm,Rn_BANK */
1423 tcg_gen_mov_i32(ALTREG(B6_4
), REG(B11_8
));
1425 case 0x4087: /* ldc.l @Rm+,Rn_BANK */
1427 tcg_gen_qemu_ld32s(ALTREG(B6_4
), REG(B11_8
), ctx
->memidx
);
1428 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1430 case 0x0082: /* stc Rm_BANK,Rn */
1432 tcg_gen_mov_i32(REG(B11_8
), ALTREG(B6_4
));
1434 case 0x4083: /* stc.l Rm_BANK,@-Rn */
1437 TCGv addr
= tcg_temp_new();
1438 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1439 tcg_gen_qemu_st32(ALTREG(B6_4
), addr
, ctx
->memidx
);
1440 tcg_temp_free(addr
);
1441 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 4);
1446 switch (ctx
->opcode
& 0xf0ff) {
1447 case 0x0023: /* braf Rn */
1448 CHECK_NOT_DELAY_SLOT
1449 tcg_gen_addi_i32(cpu_delayed_pc
, REG(B11_8
), ctx
->pc
+ 4);
1450 ctx
->flags
|= DELAY_SLOT
;
1451 ctx
->delayed_pc
= (uint32_t) - 1;
1453 case 0x0003: /* bsrf Rn */
1454 CHECK_NOT_DELAY_SLOT
1455 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1456 tcg_gen_add_i32(cpu_delayed_pc
, REG(B11_8
), cpu_pr
);
1457 ctx
->flags
|= DELAY_SLOT
;
1458 ctx
->delayed_pc
= (uint32_t) - 1;
1460 case 0x4015: /* cmp/pl Rn */
1461 gen_cmp_imm(TCG_COND_GT
, REG(B11_8
), 0);
1463 case 0x4011: /* cmp/pz Rn */
1464 gen_cmp_imm(TCG_COND_GE
, REG(B11_8
), 0);
1466 case 0x4010: /* dt Rn */
1467 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 1);
1468 gen_cmp_imm(TCG_COND_EQ
, REG(B11_8
), 0);
1470 case 0x402b: /* jmp @Rn */
1471 CHECK_NOT_DELAY_SLOT
1472 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1473 ctx
->flags
|= DELAY_SLOT
;
1474 ctx
->delayed_pc
= (uint32_t) - 1;
1476 case 0x400b: /* jsr @Rn */
1477 CHECK_NOT_DELAY_SLOT
1478 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1479 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1480 ctx
->flags
|= DELAY_SLOT
;
1481 ctx
->delayed_pc
= (uint32_t) - 1;
1483 case 0x400e: /* ldc Rm,SR */
1485 tcg_gen_andi_i32(cpu_sr
, REG(B11_8
), 0x700083f3);
1486 ctx
->bstate
= BS_STOP
;
1488 case 0x4007: /* ldc.l @Rm+,SR */
1491 TCGv val
= tcg_temp_new();
1492 tcg_gen_qemu_ld32s(val
, REG(B11_8
), ctx
->memidx
);
1493 tcg_gen_andi_i32(cpu_sr
, val
, 0x700083f3);
1495 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1496 ctx
->bstate
= BS_STOP
;
1499 case 0x0002: /* stc SR,Rn */
1501 tcg_gen_mov_i32(REG(B11_8
), cpu_sr
);
1503 case 0x4003: /* stc SR,@-Rn */
1506 TCGv addr
= tcg_temp_new();
1507 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1508 tcg_gen_qemu_st32(cpu_sr
, addr
, ctx
->memidx
);
1509 tcg_temp_free(addr
);
1510 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 4);
1513 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1516 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1520 tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
1521 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1525 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1530 TCGv addr = tcg_temp_new(); \
1531 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1532 tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
1533 tcg_temp_free(addr); \
1534 tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 4); \
1537 LDST(gbr
, 0x401e, 0x4017, 0x0012, 0x4013, {})
1538 LDST(vbr
, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED
)
1539 LDST(ssr
, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED
)
1540 LDST(spc
, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED
)
1541 LDST(dbr
, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED
)
1542 LDST(mach
, 0x400a, 0x4006, 0x000a, 0x4002, {})
1543 LDST(macl
, 0x401a, 0x4016, 0x001a, 0x4012, {})
1544 LDST(pr
, 0x402a, 0x4026, 0x002a, 0x4022, {})
1545 LDST(fpul
, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED
})
1546 case 0x406a: /* lds Rm,FPSCR */
1548 gen_helper_ld_fpscr(REG(B11_8
));
1549 ctx
->bstate
= BS_STOP
;
1551 case 0x4066: /* lds.l @Rm+,FPSCR */
1554 TCGv addr
= tcg_temp_new();
1555 tcg_gen_qemu_ld32s(addr
, REG(B11_8
), ctx
->memidx
);
1556 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1557 gen_helper_ld_fpscr(addr
);
1558 tcg_temp_free(addr
);
1559 ctx
->bstate
= BS_STOP
;
1562 case 0x006a: /* sts FPSCR,Rn */
1564 tcg_gen_andi_i32(REG(B11_8
), cpu_fpscr
, 0x003fffff);
1566 case 0x4062: /* sts FPSCR,@-Rn */
1570 val
= tcg_temp_new();
1571 tcg_gen_andi_i32(val
, cpu_fpscr
, 0x003fffff);
1572 addr
= tcg_temp_new();
1573 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1574 tcg_gen_qemu_st32(val
, addr
, ctx
->memidx
);
1575 tcg_temp_free(addr
);
1577 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 4);
1580 case 0x00c3: /* movca.l R0,@Rm */
1582 TCGv val
= tcg_temp_new();
1583 tcg_gen_qemu_ld32u(val
, REG(B11_8
), ctx
->memidx
);
1584 gen_helper_movcal (REG(B11_8
), val
);
1585 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1587 ctx
->has_movcal
= 1;
1590 /* MOVUA.L @Rm,R0 (Rm) -> R0
1591 Load non-boundary-aligned data */
1592 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1595 /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
1596 Load non-boundary-aligned data */
1597 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1598 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1600 case 0x0029: /* movt Rn */
1601 tcg_gen_andi_i32(REG(B11_8
), cpu_sr
, SR_T
);
1606 If (T == 1) R0 -> (Rn)
1609 if (ctx
->features
& SH_FEATURE_SH4A
) {
1610 int label
= gen_new_label();
1612 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cpu_ldst
);
1613 tcg_gen_brcondi_i32(TCG_COND_EQ
, cpu_ldst
, 0, label
);
1614 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1615 gen_set_label(label
);
1616 tcg_gen_movi_i32(cpu_ldst
, 0);
1624 When interrupt/exception
1627 if (ctx
->features
& SH_FEATURE_SH4A
) {
1628 tcg_gen_movi_i32(cpu_ldst
, 0);
1629 tcg_gen_qemu_ld32s(REG(0), REG(B11_8
), ctx
->memidx
);
1630 tcg_gen_movi_i32(cpu_ldst
, 1);
1634 case 0x0093: /* ocbi @Rn */
1636 gen_helper_ocbi (REG(B11_8
));
1639 case 0x00a3: /* ocbp @Rn */
1641 TCGv dummy
= tcg_temp_new();
1642 tcg_gen_qemu_ld32s(dummy
, REG(B11_8
), ctx
->memidx
);
1643 tcg_temp_free(dummy
);
1646 case 0x00b3: /* ocbwb @Rn */
1648 TCGv dummy
= tcg_temp_new();
1649 tcg_gen_qemu_ld32s(dummy
, REG(B11_8
), ctx
->memidx
);
1650 tcg_temp_free(dummy
);
1653 case 0x0083: /* pref @Rn */
1655 case 0x00d3: /* prefi @Rn */
1656 if (ctx
->features
& SH_FEATURE_SH4A
)
1660 case 0x00e3: /* icbi @Rn */
1661 if (ctx
->features
& SH_FEATURE_SH4A
)
1665 case 0x00ab: /* synco */
1666 if (ctx
->features
& SH_FEATURE_SH4A
)
1670 case 0x4024: /* rotcl Rn */
1672 TCGv tmp
= tcg_temp_new();
1673 tcg_gen_mov_i32(tmp
, cpu_sr
);
1674 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1675 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1676 gen_copy_bit_i32(REG(B11_8
), 0, tmp
, 0);
1680 case 0x4025: /* rotcr Rn */
1682 TCGv tmp
= tcg_temp_new();
1683 tcg_gen_mov_i32(tmp
, cpu_sr
);
1684 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1685 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1686 gen_copy_bit_i32(REG(B11_8
), 31, tmp
, 0);
1690 case 0x4004: /* rotl Rn */
1691 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1692 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1693 gen_copy_bit_i32(REG(B11_8
), 0, cpu_sr
, 0);
1695 case 0x4005: /* rotr Rn */
1696 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1697 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1698 gen_copy_bit_i32(REG(B11_8
), 31, cpu_sr
, 0);
1700 case 0x4000: /* shll Rn */
1701 case 0x4020: /* shal Rn */
1702 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1703 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1705 case 0x4021: /* shar Rn */
1706 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1707 tcg_gen_sari_i32(REG(B11_8
), REG(B11_8
), 1);
1709 case 0x4001: /* shlr Rn */
1710 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1711 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1713 case 0x4008: /* shll2 Rn */
1714 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 2);
1716 case 0x4018: /* shll8 Rn */
1717 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 8);
1719 case 0x4028: /* shll16 Rn */
1720 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 16);
1722 case 0x4009: /* shlr2 Rn */
1723 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 2);
1725 case 0x4019: /* shlr8 Rn */
1726 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 8);
1728 case 0x4029: /* shlr16 Rn */
1729 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 16);
1731 case 0x401b: /* tas.b @Rn */
1734 addr
= tcg_temp_local_new();
1735 tcg_gen_mov_i32(addr
, REG(B11_8
));
1736 val
= tcg_temp_local_new();
1737 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1738 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1739 tcg_gen_ori_i32(val
, val
, 0x80);
1740 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1742 tcg_temp_free(addr
);
1745 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1747 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1749 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1751 tcg_gen_mov_i32(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1753 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1755 if (ctx
->fpscr
& FPSCR_PR
) {
1757 if (ctx
->opcode
& 0x0100)
1758 break; /* illegal instruction */
1759 fp
= tcg_temp_new_i64();
1760 gen_helper_float_DT(fp
, cpu_fpul
);
1761 gen_store_fpr64(fp
, DREG(B11_8
));
1762 tcg_temp_free_i64(fp
);
1765 gen_helper_float_FT(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1768 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1770 if (ctx
->fpscr
& FPSCR_PR
) {
1772 if (ctx
->opcode
& 0x0100)
1773 break; /* illegal instruction */
1774 fp
= tcg_temp_new_i64();
1775 gen_load_fpr64(fp
, DREG(B11_8
));
1776 gen_helper_ftrc_DT(cpu_fpul
, fp
);
1777 tcg_temp_free_i64(fp
);
1780 gen_helper_ftrc_FT(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1783 case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1786 gen_helper_fneg_T(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1789 case 0xf05d: /* fabs FRn/DRn */
1791 if (ctx
->fpscr
& FPSCR_PR
) {
1792 if (ctx
->opcode
& 0x0100)
1793 break; /* illegal instruction */
1794 TCGv_i64 fp
= tcg_temp_new_i64();
1795 gen_load_fpr64(fp
, DREG(B11_8
));
1796 gen_helper_fabs_DT(fp
, fp
);
1797 gen_store_fpr64(fp
, DREG(B11_8
));
1798 tcg_temp_free_i64(fp
);
1800 gen_helper_fabs_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1803 case 0xf06d: /* fsqrt FRn */
1805 if (ctx
->fpscr
& FPSCR_PR
) {
1806 if (ctx
->opcode
& 0x0100)
1807 break; /* illegal instruction */
1808 TCGv_i64 fp
= tcg_temp_new_i64();
1809 gen_load_fpr64(fp
, DREG(B11_8
));
1810 gen_helper_fsqrt_DT(fp
, fp
);
1811 gen_store_fpr64(fp
, DREG(B11_8
));
1812 tcg_temp_free_i64(fp
);
1814 gen_helper_fsqrt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1817 case 0xf07d: /* fsrra FRn */
1820 case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1822 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1823 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0);
1826 case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1828 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1829 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0x3f800000);
1832 case 0xf0ad: /* fcnvsd FPUL,DRn */
1835 TCGv_i64 fp
= tcg_temp_new_i64();
1836 gen_helper_fcnvsd_FT_DT(fp
, cpu_fpul
);
1837 gen_store_fpr64(fp
, DREG(B11_8
));
1838 tcg_temp_free_i64(fp
);
1841 case 0xf0bd: /* fcnvds DRn,FPUL */
1844 TCGv_i64 fp
= tcg_temp_new_i64();
1845 gen_load_fpr64(fp
, DREG(B11_8
));
1846 gen_helper_fcnvds_DT_FT(cpu_fpul
, fp
);
1847 tcg_temp_free_i64(fp
);
1852 fprintf(stderr
, "unknown instruction 0x%04x at pc 0x%08x\n",
1853 ctx
->opcode
, ctx
->pc
);
1856 gen_helper_raise_illegal_instruction();
1857 ctx
->bstate
= BS_EXCP
;
1860 static void decode_opc(DisasContext
* ctx
)
1862 uint32_t old_flags
= ctx
->flags
;
1866 if (old_flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
)) {
1867 if (ctx
->flags
& DELAY_SLOT_CLEARME
) {
1870 /* go out of the delay slot */
1871 uint32_t new_flags
= ctx
->flags
;
1872 new_flags
&= ~(DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
);
1873 gen_store_flags(new_flags
);
1876 ctx
->bstate
= BS_BRANCH
;
1877 if (old_flags
& DELAY_SLOT_CONDITIONAL
) {
1878 gen_delayed_conditional_jump(ctx
);
1879 } else if (old_flags
& DELAY_SLOT
) {
1885 /* go into a delay slot */
1886 if (ctx
->flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
))
1887 gen_store_flags(ctx
->flags
);
1891 gen_intermediate_code_internal(CPUState
* env
, TranslationBlock
* tb
,
1895 target_ulong pc_start
;
1896 static uint16_t *gen_opc_end
;
1903 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
1905 ctx
.flags
= (uint32_t)tb
->flags
;
1906 ctx
.bstate
= BS_NONE
;
1908 ctx
.fpscr
= env
->fpscr
;
1909 ctx
.memidx
= (env
->sr
& SR_MD
) ? 1 : 0;
1910 /* We don't know if the delayed pc came from a dynamic or static branch,
1911 so assume it is a dynamic branch. */
1912 ctx
.delayed_pc
= -1; /* use delayed pc from env pointer */
1914 ctx
.singlestep_enabled
= env
->singlestep_enabled
;
1915 ctx
.features
= env
->features
;
1916 ctx
.has_movcal
= (tb
->flags
& TB_FLAG_PENDING_MOVCA
);
1919 qemu_log_mask(CPU_LOG_TB_CPU
,
1920 "------------------------------------------------\n");
1921 log_cpu_state_mask(CPU_LOG_TB_CPU
, env
, 0);
1926 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
1928 max_insns
= CF_COUNT_MASK
;
1930 while (ctx
.bstate
== BS_NONE
&& gen_opc_ptr
< gen_opc_end
) {
1931 if (unlikely(!TAILQ_EMPTY(&env
->breakpoints
))) {
1932 TAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
1933 if (ctx
.pc
== bp
->pc
) {
1934 /* We have hit a breakpoint - make sure PC is up-to-date */
1935 tcg_gen_movi_i32(cpu_pc
, ctx
.pc
);
1937 ctx
.bstate
= BS_EXCP
;
1943 i
= gen_opc_ptr
- gen_opc_buf
;
1947 gen_opc_instr_start
[ii
++] = 0;
1949 gen_opc_pc
[ii
] = ctx
.pc
;
1950 gen_opc_hflags
[ii
] = ctx
.flags
;
1951 gen_opc_instr_start
[ii
] = 1;
1952 gen_opc_icount
[ii
] = num_insns
;
1954 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
1957 fprintf(stderr
, "Loading opcode at address 0x%08x\n", ctx
.pc
);
1960 ctx
.opcode
= lduw_code(ctx
.pc
);
1964 if ((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
1966 if (env
->singlestep_enabled
)
1968 if (num_insns
>= max_insns
)
1973 if (tb
->cflags
& CF_LAST_IO
)
1975 if (env
->singlestep_enabled
) {
1976 tcg_gen_movi_i32(cpu_pc
, ctx
.pc
);
1979 switch (ctx
.bstate
) {
1981 /* gen_op_interrupt_restart(); */
1985 gen_store_flags(ctx
.flags
| DELAY_SLOT_CLEARME
);
1987 gen_goto_tb(&ctx
, 0, ctx
.pc
);
1990 /* gen_op_interrupt_restart(); */
1999 gen_icount_end(tb
, num_insns
);
2000 *gen_opc_ptr
= INDEX_op_end
;
2002 i
= gen_opc_ptr
- gen_opc_buf
;
2005 gen_opc_instr_start
[ii
++] = 0;
2007 tb
->size
= ctx
.pc
- pc_start
;
2008 tb
->icount
= num_insns
;
2012 #ifdef SH4_DEBUG_DISAS
2013 qemu_log_mask(CPU_LOG_TB_IN_ASM
, "\n");
2015 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
2016 qemu_log("IN:\n"); /* , lookup_symbol(pc_start)); */
2017 log_target_disas(pc_start
, ctx
.pc
- pc_start
, 0);
2023 void gen_intermediate_code(CPUState
* env
, struct TranslationBlock
*tb
)
2025 gen_intermediate_code_internal(env
, tb
, 0);
2028 void gen_intermediate_code_pc(CPUState
* env
, struct TranslationBlock
*tb
)
2030 gen_intermediate_code_internal(env
, tb
, 1);
2033 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
2034 unsigned long searched_pc
, int pc_pos
, void *puc
)
2036 env
->pc
= gen_opc_pc
[pc_pos
];
2037 env
->flags
= gen_opc_hflags
[pc_pos
];