4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
26 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
27 /* #define DEBUG_INLINE_BRANCHES */
28 #define S390X_DEBUG_DISAS
29 /* #define S390X_DEBUG_DISAS_VERBOSE */
31 #ifdef S390X_DEBUG_DISAS_VERBOSE
32 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
34 # define LOG_DISAS(...) do { } while (0)
42 /* global register indexes */
43 static TCGv_ptr cpu_env
;
45 #include "gen-icount.h"
50 typedef struct DisasContext DisasContext
;
55 struct TranslationBlock
*tb
;
60 static void gen_op_calc_cc(DisasContext
*s
);
62 #ifdef DEBUG_INLINE_BRANCHES
63 static uint64_t inline_branch_hit
[CC_OP_MAX
];
64 static uint64_t inline_branch_miss
[CC_OP_MAX
];
/* Log the raw instruction word via LOG_DISAS; compiles to a no-op unless
   S390X_DEBUG_DISAS_VERBOSE is defined (see LOG_DISAS definition above). */
67 static inline void debug_insn(uint64_t insn
)
69 LOG_DISAS("insn: 0x%" PRIx64
"\n", insn
);
/* Compute the link value stored by branch-and-link style instructions.
   When not in 64-bit mode (FLAG_MASK_64 clear) but in 31-bit mode
   (FLAG_MASK_32 set), the 31-bit addressing-mode bit 0x80000000 is OR'd
   into the return address.
   NOTE(review): the 24-bit and 64-bit return paths (original lines ~77-80)
   were dropped by extraction -- confirm against the full file. */
72 static inline uint64_t pc_to_link_info(DisasContext
*s
, uint64_t pc
)
74 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
75 if (s
->tb
->flags
& FLAG_MASK_32
) {
76 return pc
| 0x80000000;
82 void cpu_dump_state(CPUState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
87 for (i
= 0; i
< 16; i
++) {
88 cpu_fprintf(f
, "R%02d=%016" PRIx64
, i
, env
->regs
[i
]);
96 for (i
= 0; i
< 16; i
++) {
97 cpu_fprintf(f
, "F%02d=%016" PRIx64
, i
, *(uint64_t *)&env
->fregs
[i
]);
105 cpu_fprintf(f
, "\n");
107 #ifndef CONFIG_USER_ONLY
108 for (i
= 0; i
< 16; i
++) {
109 cpu_fprintf(f
, "C%02d=%016" PRIx64
, i
, env
->cregs
[i
]);
111 cpu_fprintf(f
, "\n");
118 cpu_fprintf(f
, "\n");
120 if (env
->cc_op
> 3) {
121 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %15s\n",
122 env
->psw
.mask
, env
->psw
.addr
, cc_name(env
->cc_op
));
124 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %02x\n",
125 env
->psw
.mask
, env
->psw
.addr
, env
->cc_op
);
128 #ifdef DEBUG_INLINE_BRANCHES
129 for (i
= 0; i
< CC_OP_MAX
; i
++) {
130 cpu_fprintf(f
, " %15s = %10ld\t%10ld\n", cc_name(i
),
131 inline_branch_miss
[i
], inline_branch_hit
[i
]);
136 static TCGv_i64 psw_addr
;
137 static TCGv_i64 psw_mask
;
139 static TCGv_i32 cc_op
;
140 static TCGv_i64 cc_src
;
141 static TCGv_i64 cc_dst
;
142 static TCGv_i64 cc_vr
;
144 static char cpu_reg_names
[10*3 + 6*4];
145 static TCGv_i64 regs
[16];
147 static uint8_t gen_opc_cc_op
[OPC_BUF_SIZE
];
/* One-time TCG setup for the s390x front end: registers env, the PSW
   address/mask and the cc_op/cc_src/cc_dst/cc_vr condition-code state as
   TCG globals backed by CPUState fields, then creates the 16 general
   registers with names "r0".."r15" packed into cpu_reg_names (3 bytes per
   name for r0-r9, 4 bytes for r10-r15, matching the 10*3 + 6*4 buffer).
   NOTE(review): declarations of loop index 'i' and name cursor 'p'
   (original lines ~150-154) were dropped by extraction. */
149 void s390x_translate_init(void)
152 size_t cpu_reg_names_size
= sizeof(cpu_reg_names
);
155 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
156 psw_addr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, psw
.addr
),
158 psw_mask
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, psw
.mask
),
161 cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
163 cc_src
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, cc_src
),
165 cc_dst
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
167 cc_vr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, cc_vr
),
/* Build the per-register TCG globals; 'p' walks cpu_reg_names and
   advances past each NUL-terminated name it just wrote. */
171 for (i
= 0; i
< 16; i
++) {
172 snprintf(p
, cpu_reg_names_size
, "r%d", i
);
173 regs
[i
] = tcg_global_mem_new(TCG_AREG0
,
174 offsetof(CPUState
, regs
[i
]), p
);
175 p
+= (i
< 10) ? 3 : 4;
176 cpu_reg_names_size
-= (i
< 10) ? 3 : 4;
/* Return a fresh i64 temp holding the full 64-bit value of GPR 'reg'
   (caller must free the temp).
   NOTE(review): the `return r;` line and braces (original ~184-185)
   were dropped by extraction. */
180 static inline TCGv_i64
load_reg(int reg
)
182 TCGv_i64 r
= tcg_temp_new_i64();
183 tcg_gen_mov_i64(r
, regs
[reg
]);
/* Return a fresh i64 temp loaded from FPR 'reg' (the 64-bit .d view in
   CPUState.fregs); caller frees the temp. Return line dropped by extraction. */
187 static inline TCGv_i64
load_freg(int reg
)
189 TCGv_i64 r
= tcg_temp_new_i64();
190 tcg_gen_ld_i64(r
, cpu_env
, offsetof(CPUState
, fregs
[reg
].d
));
/* Return a fresh i32 temp loaded from the upper (short-float) half of
   FPR 'reg' (fregs[reg].l.upper); caller frees. Return line dropped. */
194 static inline TCGv_i32
load_freg32(int reg
)
196 TCGv_i32 r
= tcg_temp_new_i32();
197 tcg_gen_ld_i32(r
, cpu_env
, offsetof(CPUState
, fregs
[reg
].l
.upper
));
/* Return a fresh i32 temp holding the low 32 bits of GPR 'reg'
   (truncating move); caller frees. Return line dropped by extraction. */
201 static inline TCGv_i32
load_reg32(int reg
)
203 TCGv_i32 r
= tcg_temp_new_i32();
204 tcg_gen_trunc_i64_i32(r
, regs
[reg
]);
/* Return a fresh i64 temp holding the low 32 bits of GPR 'reg'
   sign-extended to 64 bits; caller frees. Return line dropped. */
208 static inline TCGv_i64
load_reg32_i64(int reg
)
210 TCGv_i64 r
= tcg_temp_new_i64();
211 tcg_gen_ext32s_i64(r
, regs
[reg
]);
/* Write the full 64-bit value 'v' into GPR 'reg'. */
215 static inline void store_reg(int reg
, TCGv_i64 v
)
217 tcg_gen_mov_i64(regs
[reg
], v
);
/* Store 64-bit value 'v' into FPR 'reg' (fregs[reg].d in CPUState). */
220 static inline void store_freg(int reg
, TCGv_i64 v
)
222 tcg_gen_st_i64(v
, cpu_env
, offsetof(CPUState
, fregs
[reg
].d
));
/* Write i32 'v' into the low 32 bits of GPR 'reg', preserving the upper
   half (architectural 32-bit write semantics). On 32-bit hosts the i64
   global's low word is written directly; otherwise deposit into bits 0-31. */
225 static inline void store_reg32(int reg
, TCGv_i32 v
)
227 #if HOST_LONG_BITS == 32
228 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), v
);
230 TCGv_i64 tmp
= tcg_temp_new_i64();
231 tcg_gen_extu_i32_i64(tmp
, v
);
232 /* 32 bit register writes keep the upper half */
233 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], tmp
, 0, 32);
234 tcg_temp_free_i64(tmp
);
/* Same as store_reg32 but the source is already an i64: only its low
   32 bits are deposited into GPR 'reg', upper half preserved. */
238 static inline void store_reg32_i64(int reg
, TCGv_i64 v
)
240 /* 32 bit register writes keep the upper half */
241 #if HOST_LONG_BITS == 32
242 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), TCGV_LOW(v
));
244 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 32);
/* Write the low 16 bits of i32 'v' into GPR 'reg', preserving bits 16-63
   (architectural halfword write semantics). */
248 static inline void store_reg16(int reg
, TCGv_i32 v
)
250 TCGv_i64 tmp
= tcg_temp_new_i64();
251 tcg_gen_extu_i32_i64(tmp
, v
);
252 /* 16 bit register writes keep the upper bytes */
253 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], tmp
, 0, 16);
254 tcg_temp_free_i64(tmp
);
/* Write the low 8 bits of i64 'v' into GPR 'reg', preserving bits 8-63. */
257 static inline void store_reg8(int reg
, TCGv_i64 v
)
259 /* 8 bit register writes keep the upper bytes */
260 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 8);
/* Store i32 'v' into the upper (short-float) half of FPR 'reg'. */
263 static inline void store_freg32(int reg
, TCGv_i32 v
)
265 tcg_gen_st_i32(v
, cpu_env
, offsetof(CPUState
, fregs
[reg
].l
.upper
));
/* Flush the translator's current pc (s->pc) into the psw.addr TCG global
   so the guest-visible PSW address is up to date (e.g. before exceptions). */
268 static inline void update_psw_addr(DisasContext
*s
)
271 tcg_gen_movi_i64(psw_addr
, s
->pc
);
274 static inline void potential_page_fault(DisasContext
*s
)
276 #ifndef CONFIG_USER_ONLY
/* Fetch a 2-byte instruction from guest code at 'pc', zero-extended to 64 bits. */
282 static inline uint64_t ld_code2(uint64_t pc
)
284 return (uint64_t)lduw_code(pc
);
/* Fetch a 4-byte instruction from guest code at 'pc', zero-extended to 64 bits. */
287 static inline uint64_t ld_code4(uint64_t pc
)
289 return (uint64_t)ldl_code(pc
);
/* Fetch a 6-byte instruction at 'pc': high halfword into bits 32-47 of
   'opc', the following 4 bytes into bits 0-31.
   NOTE(review): the declaration of 'opc' and `return opc;` (original
   lines ~294/297) were dropped by extraction. */
292 static inline uint64_t ld_code6(uint64_t pc
)
295 opc
= (uint64_t)lduw_code(pc
) << 32;
296 opc
|= (uint64_t)(uint32_t)ldl_code(pc
+2);
/* Select the MMU index for memory accesses from the TB's address-space-
   control (ASC) bits (PSW primary/secondary/home space).
   NOTE(review): the per-case return statements and default abort
   (original lines 304/306/308+) were dropped by extraction. */
300 static inline int get_mem_index(DisasContext
*s
)
302 switch (s
->tb
->flags
& FLAG_MASK_ASC
) {
303 case PSW_ASC_PRIMARY
>> 32:
305 case PSW_ASC_SECONDARY
>> 32:
307 case PSW_ASC_HOME
>> 32:
/* Emit code to raise EXCP_DEBUG via the exception helper and mark the
   translation as ended with an exception (DISAS_EXCP). */
315 static inline void gen_debug(DisasContext
*s
)
317 TCGv_i32 tmp
= tcg_const_i32(EXCP_DEBUG
);
320 gen_helper_exception(tmp
);
321 tcg_temp_free_i32(tmp
);
322 s
->is_jmp
= DISAS_EXCP
;
325 #ifdef CONFIG_USER_ONLY
327 static void gen_illegal_opcode(DisasContext
*s
, int ilc
)
329 TCGv_i32 tmp
= tcg_const_i32(EXCP_SPEC
);
332 gen_helper_exception(tmp
);
333 tcg_temp_free_i32(tmp
);
334 s
->is_jmp
= DISAS_EXCP
;
337 #else /* CONFIG_USER_ONLY */
339 static void debug_print_inst(DisasContext
*s
, int ilc
)
341 #ifdef DEBUG_ILLEGAL_INSTRUCTIONS
346 inst
= ld_code2(s
->pc
);
349 inst
= ld_code4(s
->pc
);
352 inst
= ld_code6(s
->pc
);
356 fprintf(stderr
, "Illegal instruction [%d at %016" PRIx64
"]: 0x%016"
357 PRIx64
"\n", ilc
, s
->pc
, inst
);
/* Emit code to deliver a program interrupt: stores the program-interrupt
   'code' and instruction-length code 'ilc' into CPUState
   (int_pgm_code / int_pgm_ilc), then raises EXCP_PGM through the
   exception helper and ends the TB (DISAS_EXCP).
   NOTE(review): the declaration of 'tmp' and the "advance past
   instruction" PSW-address update (original ~377-382) were dropped by
   extraction. */
361 static void gen_program_exception(DisasContext
*s
, int ilc
, int code
)
365 debug_print_inst(s
, ilc
);
367 /* remember what pgm exception this was */
368 tmp
= tcg_const_i32(code
);
369 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUState
, int_pgm_code
));
370 tcg_temp_free_i32(tmp
);
372 tmp
= tcg_const_i32(ilc
);
373 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUState
, int_pgm_ilc
));
374 tcg_temp_free_i32(tmp
);
376 /* advance past instruction */
383 /* trigger exception */
384 tmp
= tcg_const_i32(EXCP_PGM
);
385 gen_helper_exception(tmp
);
386 tcg_temp_free_i32(tmp
);
389 s
->is_jmp
= DISAS_EXCP
;
/* System-emulation path: an illegal opcode is delivered as a
   specification program exception. */
393 static void gen_illegal_opcode(DisasContext
*s
, int ilc
)
395 gen_program_exception(s
, ilc
, PGM_SPECIFICATION
);
/* Deliver a privileged-operation program exception. */
398 static void gen_privileged_exception(DisasContext
*s
, int ilc
)
400 gen_program_exception(s
, ilc
, PGM_PRIVILEGED
);
/* If the TB was translated in problem state (PSW_MASK_PSTATE set in the
   TB flags), raise a privileged-operation exception instead of allowing
   the instruction. */
403 static void check_privileged(DisasContext
*s
, int ilc
)
405 if (s
->tb
->flags
& (PSW_MASK_PSTATE
>> 32)) {
406 gen_privileged_exception(s
, ilc
);
410 #endif /* CONFIG_USER_ONLY */
/* Compute an effective address from displacement d2, optional index
   register x2 and optional base register b2 (0 means "no register" for
   both, per the architecture). Returns a fresh i64 temp. Outside 64-bit
   mode the result is masked to 31 bits whenever register contents were
   added; the pure-immediate part is 31-bitified up front.
   NOTE(review): the branch structure (if/else chains for the x2/b2
   combinations) and the return statement were dropped by extraction --
   only the arithmetic lines remain visible. */
412 static TCGv_i64
get_address(DisasContext
*s
, int x2
, int b2
, int d2
)
416 /* 31-bitify the immediate part; register contents are dealt with below */
417 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
423 tmp
= tcg_const_i64(d2
);
424 tcg_gen_add_i64(tmp
, tmp
, regs
[x2
]);
429 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
433 tmp
= tcg_const_i64(d2
);
434 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
439 tmp
= tcg_const_i64(d2
);
442 /* 31-bit mode mask if there are values loaded from registers */
443 if (!(s
->tb
->flags
& FLAG_MASK_64
) && (x2
|| b2
)) {
444 tcg_gen_andi_i64(tmp
, tmp
, 0x7fffffffUL
);
/* Record a compile-time-constant condition code: cc ops CC_OP_CONST0..3
   encode the literal cc value 'val' directly in s->cc_op. */
450 static void gen_op_movi_cc(DisasContext
*s
, uint32_t val
)
452 s
->cc_op
= CC_OP_CONST0
+ val
;
/* Set up lazy cc computation with one i64 operand: only cc_dst is live,
   cc_src/cc_vr are discarded.
   NOTE(review): the `s->cc_op = op;` line (original ~460) was dropped
   by extraction. */
455 static void gen_op_update1_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 dst
)
457 tcg_gen_discard_i64(cc_src
);
458 tcg_gen_mov_i64(cc_dst
, dst
);
459 tcg_gen_discard_i64(cc_vr
);
/* Same as gen_op_update1_cc_i64 but the operand is i32, zero-extended
   into cc_dst. NOTE(review): trailing `s->cc_op = op;` dropped by extraction. */
463 static void gen_op_update1_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 dst
)
465 tcg_gen_discard_i64(cc_src
);
466 tcg_gen_extu_i32_i64(cc_dst
, dst
);
467 tcg_gen_discard_i64(cc_vr
);
/* Lazy cc setup with two i64 operands (cc_src, cc_dst); cc_vr discarded.
   NOTE(review): second parameter line and `s->cc_op = op;` dropped by
   extraction. */
471 static void gen_op_update2_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
474 tcg_gen_mov_i64(cc_src
, src
);
475 tcg_gen_mov_i64(cc_dst
, dst
);
476 tcg_gen_discard_i64(cc_vr
);
/* Two-operand i32 variant: operands are zero-extended into cc_src/cc_dst.
   NOTE(review): second parameter line and `s->cc_op = op;` dropped by
   extraction. */
480 static void gen_op_update2_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
483 tcg_gen_extu_i32_i64(cc_src
, src
);
484 tcg_gen_extu_i32_i64(cc_dst
, dst
);
485 tcg_gen_discard_i64(cc_vr
);
/* Three-operand lazy cc setup (src, dst and result vr all live), used by
   add/sub style ops. NOTE(review): `s->cc_op = op;` dropped by extraction. */
489 static void gen_op_update3_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
490 TCGv_i64 dst
, TCGv_i64 vr
)
492 tcg_gen_mov_i64(cc_src
, src
);
493 tcg_gen_mov_i64(cc_dst
, dst
);
494 tcg_gen_mov_i64(cc_vr
, vr
);
/* Three-operand i32 variant: all operands zero-extended into the i64
   cc globals. NOTE(review): `s->cc_op = op;` dropped by extraction. */
498 static void gen_op_update3_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
499 TCGv_i32 dst
, TCGv_i32 vr
)
501 tcg_gen_extu_i32_i64(cc_src
, src
);
502 tcg_gen_extu_i32_i64(cc_dst
, dst
);
503 tcg_gen_extu_i32_i64(cc_vr
, vr
);
507 static inline void set_cc_nz_u32(DisasContext
*s
, TCGv_i32 val
)
509 gen_op_update1_cc_i32(s
, CC_OP_NZ
, val
);
512 static inline void set_cc_nz_u64(DisasContext
*s
, TCGv_i64 val
)
514 gen_op_update1_cc_i64(s
, CC_OP_NZ
, val
);
517 static inline void cmp_32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
520 gen_op_update2_cc_i32(s
, cond
, v1
, v2
);
523 static inline void cmp_64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
526 gen_op_update2_cc_i64(s
, cond
, v1
, v2
);
529 static inline void cmp_s32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
531 cmp_32(s
, v1
, v2
, CC_OP_LTGT_32
);
534 static inline void cmp_u32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
536 cmp_32(s
, v1
, v2
, CC_OP_LTUGTU_32
);
539 static inline void cmp_s32c(DisasContext
*s
, TCGv_i32 v1
, int32_t v2
)
541 /* XXX optimize for the constant? put it in s? */
542 TCGv_i32 tmp
= tcg_const_i32(v2
);
543 cmp_32(s
, v1
, tmp
, CC_OP_LTGT_32
);
544 tcg_temp_free_i32(tmp
);
547 static inline void cmp_u32c(DisasContext
*s
, TCGv_i32 v1
, uint32_t v2
)
549 TCGv_i32 tmp
= tcg_const_i32(v2
);
550 cmp_32(s
, v1
, tmp
, CC_OP_LTUGTU_32
);
551 tcg_temp_free_i32(tmp
);
554 static inline void cmp_s64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
556 cmp_64(s
, v1
, v2
, CC_OP_LTGT_64
);
559 static inline void cmp_u64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
561 cmp_64(s
, v1
, v2
, CC_OP_LTUGTU_64
);
564 static inline void cmp_s64c(DisasContext
*s
, TCGv_i64 v1
, int64_t v2
)
566 TCGv_i64 tmp
= tcg_const_i64(v2
);
568 tcg_temp_free_i64(tmp
);
571 static inline void cmp_u64c(DisasContext
*s
, TCGv_i64 v1
, uint64_t v2
)
573 TCGv_i64 tmp
= tcg_const_i64(v2
);
575 tcg_temp_free_i64(tmp
);
578 static inline void set_cc_s32(DisasContext
*s
, TCGv_i32 val
)
580 gen_op_update1_cc_i32(s
, CC_OP_LTGT0_32
, val
);
583 static inline void set_cc_s64(DisasContext
*s
, TCGv_i64 val
)
585 gen_op_update1_cc_i64(s
, CC_OP_LTGT0_64
, val
);
588 static void set_cc_add64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
, TCGv_i64 vr
)
590 gen_op_update3_cc_i64(s
, CC_OP_ADD_64
, v1
, v2
, vr
);
593 static void set_cc_addu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
596 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, v1
, v2
, vr
);
599 static void set_cc_sub64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
, TCGv_i64 vr
)
601 gen_op_update3_cc_i64(s
, CC_OP_SUB_64
, v1
, v2
, vr
);
604 static void set_cc_subu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
607 gen_op_update3_cc_i64(s
, CC_OP_SUBU_64
, v1
, v2
, vr
);
610 static void set_cc_abs64(DisasContext
*s
, TCGv_i64 v1
)
612 gen_op_update1_cc_i64(s
, CC_OP_ABS_64
, v1
);
615 static void set_cc_nabs64(DisasContext
*s
, TCGv_i64 v1
)
617 gen_op_update1_cc_i64(s
, CC_OP_NABS_64
, v1
);
620 static void set_cc_add32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
, TCGv_i32 vr
)
622 gen_op_update3_cc_i32(s
, CC_OP_ADD_32
, v1
, v2
, vr
);
625 static void set_cc_addu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
628 gen_op_update3_cc_i32(s
, CC_OP_ADDU_32
, v1
, v2
, vr
);
631 static void set_cc_sub32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
, TCGv_i32 vr
)
633 gen_op_update3_cc_i32(s
, CC_OP_SUB_32
, v1
, v2
, vr
);
636 static void set_cc_subu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
639 gen_op_update3_cc_i32(s
, CC_OP_SUBU_32
, v1
, v2
, vr
);
642 static void set_cc_abs32(DisasContext
*s
, TCGv_i32 v1
)
644 gen_op_update1_cc_i32(s
, CC_OP_ABS_32
, v1
);
647 static void set_cc_nabs32(DisasContext
*s
, TCGv_i32 v1
)
649 gen_op_update1_cc_i32(s
, CC_OP_NABS_32
, v1
);
652 static void set_cc_comp32(DisasContext
*s
, TCGv_i32 v1
)
654 gen_op_update1_cc_i32(s
, CC_OP_COMP_32
, v1
);
657 static void set_cc_comp64(DisasContext
*s
, TCGv_i64 v1
)
659 gen_op_update1_cc_i64(s
, CC_OP_COMP_64
, v1
);
662 static void set_cc_icm(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
664 gen_op_update2_cc_i32(s
, CC_OP_ICM
, v1
, v2
);
667 static void set_cc_cmp_f32_i64(DisasContext
*s
, TCGv_i32 v1
, TCGv_i64 v2
)
669 tcg_gen_extu_i32_i64(cc_src
, v1
);
670 tcg_gen_mov_i64(cc_dst
, v2
);
671 tcg_gen_discard_i64(cc_vr
);
672 s
->cc_op
= CC_OP_LTGT_F32
;
675 static void set_cc_nz_f32(DisasContext
*s
, TCGv_i32 v1
)
677 gen_op_update1_cc_i32(s
, CC_OP_NZ_F32
, v1
);
680 static inline void set_cc_nz_f64(DisasContext
*s
, TCGv_i64 v1
)
682 gen_op_update1_cc_i64(s
, CC_OP_NZ_F64
, v1
);
/* Mark the condition code as already computed and stored in env->cc_op:
   the lazy operands are discarded and s->cc_op is set to CC_OP_STATIC. */
685 /* CC value is in env->cc_op */
686 static inline void set_cc_static(DisasContext
*s
)
688 tcg_gen_discard_i64(cc_src
);
689 tcg_gen_discard_i64(cc_dst
);
690 tcg_gen_discard_i64(cc_vr
);
691 s
->cc_op
= CC_OP_STATIC
;
/* Materialize the translation-time cc operation into the cc_op TCG
   global, unless it is already dynamic or static (in which case the
   runtime value is authoritative and must not be overwritten). */
694 static inline void gen_op_set_cc_op(DisasContext
*s
)
696 if (s
->cc_op
!= CC_OP_DYNAMIC
&& s
->cc_op
!= CC_OP_STATIC
) {
697 tcg_gen_movi_i32(cc_op
, s
->cc_op
);
701 static inline void gen_update_cc_op(DisasContext
*s
)
706 /* calculates cc into cc_op */
707 static void gen_op_calc_cc(DisasContext
*s
)
709 TCGv_i32 local_cc_op
= tcg_const_i32(s
->cc_op
);
710 TCGv_i64 dummy
= tcg_const_i64(0);
717 /* s->cc_op is the cc value */
718 tcg_gen_movi_i32(cc_op
, s
->cc_op
- CC_OP_CONST0
);
721 /* env->cc_op already is the cc value */
735 gen_helper_calc_cc(cc_op
, local_cc_op
, dummy
, cc_dst
, dummy
);
740 case CC_OP_LTUGTU_32
:
741 case CC_OP_LTUGTU_64
:
748 gen_helper_calc_cc(cc_op
, local_cc_op
, cc_src
, cc_dst
, dummy
);
759 gen_helper_calc_cc(cc_op
, local_cc_op
, cc_src
, cc_dst
, cc_vr
);
762 /* unknown operation - assume 3 arguments and cc_op in env */
763 gen_helper_calc_cc(cc_op
, cc_op
, cc_src
, cc_dst
, cc_vr
);
769 tcg_temp_free_i32(local_cc_op
);
771 /* We now have cc in cc_op as constant */
775 static inline void decode_rr(DisasContext
*s
, uint64_t insn
, int *r1
, int *r2
)
779 *r1
= (insn
>> 4) & 0xf;
783 static inline TCGv_i64
decode_rx(DisasContext
*s
, uint64_t insn
, int *r1
,
784 int *x2
, int *b2
, int *d2
)
788 *r1
= (insn
>> 20) & 0xf;
789 *x2
= (insn
>> 16) & 0xf;
790 *b2
= (insn
>> 12) & 0xf;
793 return get_address(s
, *x2
, *b2
, *d2
);
796 static inline void decode_rs(DisasContext
*s
, uint64_t insn
, int *r1
, int *r3
,
801 *r1
= (insn
>> 20) & 0xf;
803 *r3
= (insn
>> 16) & 0xf;
804 *b2
= (insn
>> 12) & 0xf;
808 static inline TCGv_i64
decode_si(DisasContext
*s
, uint64_t insn
, int *i2
,
813 *i2
= (insn
>> 16) & 0xff;
814 *b1
= (insn
>> 12) & 0xf;
817 return get_address(s
, 0, *b1
, *d1
);
/* Jump to guest address 'pc', using a chained direct jump
   (tcg_gen_goto_tb + exit_tb with the TB pointer tagged by tb_num) when
   the target lies on the same guest page as the TB start or the current
   instruction; otherwise just set psw_addr and fall back to the slow
   exit.
   NOTE(review): the assignment of 'tb' from s->tb and the else-branch
   exit_tb(0) (original ~824/837) were dropped by extraction. */
820 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong pc
)
822 TranslationBlock
*tb
;
827 /* NOTE: we handle the case where the TB spans two pages here */
828 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
829 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
830 /* jump to same page: we can use a direct jump */
831 tcg_gen_goto_tb(tb_num
);
832 tcg_gen_movi_i64(psw_addr
, pc
);
833 tcg_gen_exit_tb((long)tb
+ tb_num
);
835 /* jump to another page: currently not optimized */
836 tcg_gen_movi_i64(psw_addr
, pc
);
/* Debug statistics: count a branch that could not be emitted inline for
   the given cc op. No-op unless DEBUG_INLINE_BRANCHES is defined. */
841 static inline void account_noninline_branch(DisasContext
*s
, int cc_op
)
843 #ifdef DEBUG_INLINE_BRANCHES
844 inline_branch_miss
[cc_op
]++;
/* Debug statistics: count a branch emitted inline for the current
   s->cc_op. No-op unless DEBUG_INLINE_BRANCHES is defined. */
848 static inline void account_inline_branch(DisasContext
*s
)
850 #ifdef DEBUG_INLINE_BRANCHES
851 inline_branch_hit
[s
->cc_op
]++;
855 static void gen_jcc(DisasContext
*s
, uint32_t mask
, int skip
)
857 TCGv_i32 tmp
, tmp2
, r
;
863 tmp
= tcg_temp_new_i32();
864 tcg_gen_trunc_i64_i32(tmp
, cc_dst
);
866 case 0x8 | 0x4: /* dst <= 0 */
867 tcg_gen_brcondi_i32(TCG_COND_GT
, tmp
, 0, skip
);
869 case 0x8 | 0x2: /* dst >= 0 */
870 tcg_gen_brcondi_i32(TCG_COND_LT
, tmp
, 0, skip
);
872 case 0x8: /* dst == 0 */
873 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
875 case 0x7: /* dst != 0 */
876 case 0x6: /* dst != 0 */
877 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
879 case 0x4: /* dst < 0 */
880 tcg_gen_brcondi_i32(TCG_COND_GE
, tmp
, 0, skip
);
882 case 0x2: /* dst > 0 */
883 tcg_gen_brcondi_i32(TCG_COND_LE
, tmp
, 0, skip
);
886 tcg_temp_free_i32(tmp
);
889 account_inline_branch(s
);
890 tcg_temp_free_i32(tmp
);
894 case 0x8 | 0x4: /* dst <= 0 */
895 tcg_gen_brcondi_i64(TCG_COND_GT
, cc_dst
, 0, skip
);
897 case 0x8 | 0x2: /* dst >= 0 */
898 tcg_gen_brcondi_i64(TCG_COND_LT
, cc_dst
, 0, skip
);
900 case 0x8: /* dst == 0 */
901 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
903 case 0x7: /* dst != 0 */
904 case 0x6: /* dst != 0 */
905 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
907 case 0x4: /* dst < 0 */
908 tcg_gen_brcondi_i64(TCG_COND_GE
, cc_dst
, 0, skip
);
910 case 0x2: /* dst > 0 */
911 tcg_gen_brcondi_i64(TCG_COND_LE
, cc_dst
, 0, skip
);
916 account_inline_branch(s
);
919 tmp
= tcg_temp_new_i32();
920 tmp2
= tcg_temp_new_i32();
921 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
922 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
924 case 0x8 | 0x4: /* src <= dst */
925 tcg_gen_brcond_i32(TCG_COND_GT
, tmp
, tmp2
, skip
);
927 case 0x8 | 0x2: /* src >= dst */
928 tcg_gen_brcond_i32(TCG_COND_LT
, tmp
, tmp2
, skip
);
930 case 0x8: /* src == dst */
931 tcg_gen_brcond_i32(TCG_COND_NE
, tmp
, tmp2
, skip
);
933 case 0x7: /* src != dst */
934 case 0x6: /* src != dst */
935 tcg_gen_brcond_i32(TCG_COND_EQ
, tmp
, tmp2
, skip
);
937 case 0x4: /* src < dst */
938 tcg_gen_brcond_i32(TCG_COND_GE
, tmp
, tmp2
, skip
);
940 case 0x2: /* src > dst */
941 tcg_gen_brcond_i32(TCG_COND_LE
, tmp
, tmp2
, skip
);
944 tcg_temp_free_i32(tmp
);
945 tcg_temp_free_i32(tmp2
);
948 account_inline_branch(s
);
949 tcg_temp_free_i32(tmp
);
950 tcg_temp_free_i32(tmp2
);
954 case 0x8 | 0x4: /* src <= dst */
955 tcg_gen_brcond_i64(TCG_COND_GT
, cc_src
, cc_dst
, skip
);
957 case 0x8 | 0x2: /* src >= dst */
958 tcg_gen_brcond_i64(TCG_COND_LT
, cc_src
, cc_dst
, skip
);
960 case 0x8: /* src == dst */
961 tcg_gen_brcond_i64(TCG_COND_NE
, cc_src
, cc_dst
, skip
);
963 case 0x7: /* src != dst */
964 case 0x6: /* src != dst */
965 tcg_gen_brcond_i64(TCG_COND_EQ
, cc_src
, cc_dst
, skip
);
967 case 0x4: /* src < dst */
968 tcg_gen_brcond_i64(TCG_COND_GE
, cc_src
, cc_dst
, skip
);
970 case 0x2: /* src > dst */
971 tcg_gen_brcond_i64(TCG_COND_LE
, cc_src
, cc_dst
, skip
);
976 account_inline_branch(s
);
978 case CC_OP_LTUGTU_32
:
979 tmp
= tcg_temp_new_i32();
980 tmp2
= tcg_temp_new_i32();
981 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
982 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
984 case 0x8 | 0x4: /* src <= dst */
985 tcg_gen_brcond_i32(TCG_COND_GTU
, tmp
, tmp2
, skip
);
987 case 0x8 | 0x2: /* src >= dst */
988 tcg_gen_brcond_i32(TCG_COND_LTU
, tmp
, tmp2
, skip
);
990 case 0x8: /* src == dst */
991 tcg_gen_brcond_i32(TCG_COND_NE
, tmp
, tmp2
, skip
);
993 case 0x7: /* src != dst */
994 case 0x6: /* src != dst */
995 tcg_gen_brcond_i32(TCG_COND_EQ
, tmp
, tmp2
, skip
);
997 case 0x4: /* src < dst */
998 tcg_gen_brcond_i32(TCG_COND_GEU
, tmp
, tmp2
, skip
);
1000 case 0x2: /* src > dst */
1001 tcg_gen_brcond_i32(TCG_COND_LEU
, tmp
, tmp2
, skip
);
1004 tcg_temp_free_i32(tmp
);
1005 tcg_temp_free_i32(tmp2
);
1008 account_inline_branch(s
);
1009 tcg_temp_free_i32(tmp
);
1010 tcg_temp_free_i32(tmp2
);
1012 case CC_OP_LTUGTU_64
:
1014 case 0x8 | 0x4: /* src <= dst */
1015 tcg_gen_brcond_i64(TCG_COND_GTU
, cc_src
, cc_dst
, skip
);
1017 case 0x8 | 0x2: /* src >= dst */
1018 tcg_gen_brcond_i64(TCG_COND_LTU
, cc_src
, cc_dst
, skip
);
1020 case 0x8: /* src == dst */
1021 tcg_gen_brcond_i64(TCG_COND_NE
, cc_src
, cc_dst
, skip
);
1023 case 0x7: /* src != dst */
1024 case 0x6: /* src != dst */
1025 tcg_gen_brcond_i64(TCG_COND_EQ
, cc_src
, cc_dst
, skip
);
1027 case 0x4: /* src < dst */
1028 tcg_gen_brcond_i64(TCG_COND_GEU
, cc_src
, cc_dst
, skip
);
1030 case 0x2: /* src > dst */
1031 tcg_gen_brcond_i64(TCG_COND_LEU
, cc_src
, cc_dst
, skip
);
1036 account_inline_branch(s
);
1040 /* dst == 0 || dst != 0 */
1042 case 0x8 | 0x4 | 0x2:
1043 case 0x8 | 0x4 | 0x2 | 0x1:
1044 case 0x8 | 0x4 | 0x1:
1049 case 0x8 | 0x2 | 0x1:
1051 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
1056 case 0x4 | 0x2 | 0x1:
1058 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
1063 account_inline_branch(s
);
1066 tmp
= tcg_temp_new_i32();
1067 tmp2
= tcg_temp_new_i32();
1069 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
1070 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
1071 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
1073 case 0x8: /* val & mask == 0 */
1074 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
1076 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1077 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
1080 tcg_temp_free_i32(tmp
);
1081 tcg_temp_free_i32(tmp2
);
1084 tcg_temp_free_i32(tmp
);
1085 tcg_temp_free_i32(tmp2
);
1086 account_inline_branch(s
);
1089 tmp64
= tcg_temp_new_i64();
1091 tcg_gen_and_i64(tmp64
, cc_src
, cc_dst
);
1093 case 0x8: /* val & mask == 0 */
1094 tcg_gen_brcondi_i64(TCG_COND_NE
, tmp64
, 0, skip
);
1096 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1097 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp64
, 0, skip
);
1100 tcg_temp_free_i64(tmp64
);
1103 tcg_temp_free_i64(tmp64
);
1104 account_inline_branch(s
);
1108 case 0x8: /* val == 0 */
1109 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
1111 case 0x4 | 0x2 | 0x1: /* val != 0 */
1112 case 0x4 | 0x2: /* val != 0 */
1113 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
1118 account_inline_branch(s
);
1121 old_cc_op
= s
->cc_op
;
1122 goto do_dynamic_nocccalc
;
1126 old_cc_op
= s
->cc_op
;
1127 /* calculate cc value */
1130 do_dynamic_nocccalc
:
1131 /* jump based on cc */
1132 account_noninline_branch(s
, old_cc_op
);
1135 case 0x8 | 0x4 | 0x2 | 0x1:
1138 case 0x8 | 0x4 | 0x2: /* cc != 3 */
1139 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 3, skip
);
1141 case 0x8 | 0x4 | 0x1: /* cc != 2 */
1142 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 2, skip
);
1144 case 0x8 | 0x2 | 0x1: /* cc != 1 */
1145 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 1, skip
);
1147 case 0x8 | 0x2: /* cc == 0 ||Â cc == 2 */
1148 tmp
= tcg_temp_new_i32();
1149 tcg_gen_andi_i32(tmp
, cc_op
, 1);
1150 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
1151 tcg_temp_free_i32(tmp
);
1153 case 0x8 | 0x4: /* cc < 2 */
1154 tcg_gen_brcondi_i32(TCG_COND_GEU
, cc_op
, 2, skip
);
1156 case 0x8: /* cc == 0 */
1157 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 0, skip
);
1159 case 0x4 | 0x2 | 0x1: /* cc != 0 */
1160 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 0, skip
);
1162 case 0x4 | 0x1: /* cc == 1 ||Â cc == 3 */
1163 tmp
= tcg_temp_new_i32();
1164 tcg_gen_andi_i32(tmp
, cc_op
, 1);
1165 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
1166 tcg_temp_free_i32(tmp
);
1168 case 0x4: /* cc == 1 */
1169 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 1, skip
);
1171 case 0x2 | 0x1: /* cc > 1 */
1172 tcg_gen_brcondi_i32(TCG_COND_LEU
, cc_op
, 1, skip
);
1174 case 0x2: /* cc == 2 */
1175 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 2, skip
);
1177 case 0x1: /* cc == 3 */
1178 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 3, skip
);
1180 default: /* cc is masked by something else */
1181 tmp
= tcg_const_i32(3);
1183 tcg_gen_sub_i32(tmp
, tmp
, cc_op
);
1184 tmp2
= tcg_const_i32(1);
1186 tcg_gen_shl_i32(tmp2
, tmp2
, tmp
);
1187 r
= tcg_const_i32(mask
);
1188 /* mask & (1 << (3 - cc)) */
1189 tcg_gen_and_i32(r
, r
, tmp2
);
1190 tcg_temp_free_i32(tmp
);
1191 tcg_temp_free_i32(tmp2
);
1193 tcg_gen_brcondi_i32(TCG_COND_EQ
, r
, 0, skip
);
1194 tcg_temp_free_i32(r
);
1201 static void gen_bcr(DisasContext
*s
, uint32_t mask
, TCGv_i64 target
,
1208 tcg_gen_mov_i64(psw_addr
, target
);
1210 } else if (mask
== 0) {
1211 /* ignore cc and never match */
1212 gen_goto_tb(s
, 0, offset
+ 2);
1214 TCGv_i64 new_addr
= tcg_temp_local_new_i64();
1216 tcg_gen_mov_i64(new_addr
, target
);
1217 skip
= gen_new_label();
1218 gen_jcc(s
, mask
, skip
);
1219 tcg_gen_mov_i64(psw_addr
, new_addr
);
1220 tcg_temp_free_i64(new_addr
);
1222 gen_set_label(skip
);
1223 tcg_temp_free_i64(new_addr
);
1224 gen_goto_tb(s
, 1, offset
+ 2);
1228 static void gen_brc(uint32_t mask
, DisasContext
*s
, int32_t offset
)
1234 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1235 } else if (mask
== 0) {
1236 /* ignore cc and never match */
1237 gen_goto_tb(s
, 0, s
->pc
+ 4);
1239 skip
= gen_new_label();
1240 gen_jcc(s
, mask
, skip
);
1241 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1242 gen_set_label(skip
);
1243 gen_goto_tb(s
, 1, s
->pc
+ 4);
1245 s
->is_jmp
= DISAS_TB_JUMP
;
1248 static void gen_op_mvc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1252 int l_memset
= gen_new_label();
1253 int l_out
= gen_new_label();
1254 TCGv_i64 dest
= tcg_temp_local_new_i64();
1255 TCGv_i64 src
= tcg_temp_local_new_i64();
1258 /* Find out if we should use the inline version of mvc */
1273 /* Fall back to helper */
1274 vl
= tcg_const_i32(l
);
1275 potential_page_fault(s
);
1276 gen_helper_mvc(vl
, s1
, s2
);
1277 tcg_temp_free_i32(vl
);
1281 tcg_gen_mov_i64(dest
, s1
);
1282 tcg_gen_mov_i64(src
, s2
);
1284 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
1285 /* XXX what if we overflow while moving? */
1286 tcg_gen_andi_i64(dest
, dest
, 0x7fffffffUL
);
1287 tcg_gen_andi_i64(src
, src
, 0x7fffffffUL
);
1290 tmp
= tcg_temp_new_i64();
1291 tcg_gen_addi_i64(tmp
, src
, 1);
1292 tcg_gen_brcond_i64(TCG_COND_EQ
, dest
, tmp
, l_memset
);
1293 tcg_temp_free_i64(tmp
);
1297 tmp
= tcg_temp_new_i64();
1299 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1300 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1302 tcg_temp_free_i64(tmp
);
1305 tmp
= tcg_temp_new_i64();
1307 tcg_gen_qemu_ld16u(tmp
, src
, get_mem_index(s
));
1308 tcg_gen_qemu_st16(tmp
, dest
, get_mem_index(s
));
1310 tcg_temp_free_i64(tmp
);
1313 tmp
= tcg_temp_new_i64();
1315 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1316 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1318 tcg_temp_free_i64(tmp
);
1321 tmp
= tcg_temp_new_i64();
1322 tmp2
= tcg_temp_new_i64();
1324 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1325 tcg_gen_addi_i64(src
, src
, 4);
1326 tcg_gen_qemu_ld8u(tmp2
, src
, get_mem_index(s
));
1327 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1328 tcg_gen_addi_i64(dest
, dest
, 4);
1329 tcg_gen_qemu_st8(tmp2
, dest
, get_mem_index(s
));
1331 tcg_temp_free_i64(tmp
);
1332 tcg_temp_free_i64(tmp2
);
1335 tmp
= tcg_temp_new_i64();
1337 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1338 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1340 tcg_temp_free_i64(tmp
);
1343 /* The inline version can become too big for too uneven numbers, only
1344 use it on known good lengths */
1345 tmp
= tcg_temp_new_i64();
1346 tmp2
= tcg_const_i64(8);
1347 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1348 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1349 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1351 tcg_gen_add_i64(src
, src
, tmp2
);
1352 tcg_gen_add_i64(dest
, dest
, tmp2
);
1355 tcg_temp_free_i64(tmp2
);
1356 tmp2
= tcg_const_i64(1);
1358 for (; i
<= l
; i
++) {
1359 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1360 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1362 tcg_gen_add_i64(src
, src
, tmp2
);
1363 tcg_gen_add_i64(dest
, dest
, tmp2
);
1366 tcg_temp_free_i64(tmp2
);
1367 tcg_temp_free_i64(tmp
);
1373 gen_set_label(l_memset
);
1374 /* memset case (dest == (src + 1)) */
1376 tmp
= tcg_temp_new_i64();
1377 tmp2
= tcg_temp_new_i64();
1378 /* fill tmp with the byte */
1379 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1380 tcg_gen_shli_i64(tmp2
, tmp
, 8);
1381 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1382 tcg_gen_shli_i64(tmp2
, tmp
, 16);
1383 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1384 tcg_gen_shli_i64(tmp2
, tmp
, 32);
1385 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1386 tcg_temp_free_i64(tmp2
);
1388 tmp2
= tcg_const_i64(8);
1390 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1391 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1392 tcg_gen_addi_i64(dest
, dest
, 8);
1395 tcg_temp_free_i64(tmp2
);
1396 tmp2
= tcg_const_i64(1);
1398 for (; i
<= l
; i
++) {
1399 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1400 tcg_gen_addi_i64(dest
, dest
, 1);
1403 tcg_temp_free_i64(tmp2
);
1404 tcg_temp_free_i64(tmp
);
1406 gen_set_label(l_out
);
1408 tcg_temp_free(dest
);
1412 static void gen_op_clc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1418 /* check for simple 32bit or 64bit match */
1421 tmp
= tcg_temp_new_i64();
1422 tmp2
= tcg_temp_new_i64();
1424 tcg_gen_qemu_ld8u(tmp
, s1
, get_mem_index(s
));
1425 tcg_gen_qemu_ld8u(tmp2
, s2
, get_mem_index(s
));
1426 cmp_u64(s
, tmp
, tmp2
);
1428 tcg_temp_free_i64(tmp
);
1429 tcg_temp_free_i64(tmp2
);
1432 tmp
= tcg_temp_new_i64();
1433 tmp2
= tcg_temp_new_i64();
1435 tcg_gen_qemu_ld16u(tmp
, s1
, get_mem_index(s
));
1436 tcg_gen_qemu_ld16u(tmp2
, s2
, get_mem_index(s
));
1437 cmp_u64(s
, tmp
, tmp2
);
1439 tcg_temp_free_i64(tmp
);
1440 tcg_temp_free_i64(tmp2
);
1443 tmp
= tcg_temp_new_i64();
1444 tmp2
= tcg_temp_new_i64();
1446 tcg_gen_qemu_ld32u(tmp
, s1
, get_mem_index(s
));
1447 tcg_gen_qemu_ld32u(tmp2
, s2
, get_mem_index(s
));
1448 cmp_u64(s
, tmp
, tmp2
);
1450 tcg_temp_free_i64(tmp
);
1451 tcg_temp_free_i64(tmp2
);
1454 tmp
= tcg_temp_new_i64();
1455 tmp2
= tcg_temp_new_i64();
1457 tcg_gen_qemu_ld64(tmp
, s1
, get_mem_index(s
));
1458 tcg_gen_qemu_ld64(tmp2
, s2
, get_mem_index(s
));
1459 cmp_u64(s
, tmp
, tmp2
);
1461 tcg_temp_free_i64(tmp
);
1462 tcg_temp_free_i64(tmp2
);
1466 potential_page_fault(s
);
1467 vl
= tcg_const_i32(l
);
1468 gen_helper_clc(cc_op
, vl
, s1
, s2
);
1469 tcg_temp_free_i32(vl
);
1473 static void disas_e3(DisasContext
* s
, int op
, int r1
, int x2
, int b2
, int d2
)
1475 TCGv_i64 addr
, tmp
, tmp2
, tmp3
, tmp4
;
1476 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
1478 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1479 op
, r1
, x2
, b2
, d2
);
1480 addr
= get_address(s
, x2
, b2
, d2
);
1482 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1483 case 0x4: /* lg r1,d2(x2,b2) */
1484 tcg_gen_qemu_ld64(regs
[r1
], addr
, get_mem_index(s
));
1486 set_cc_s64(s
, regs
[r1
]);
1489 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1490 tmp2
= tcg_temp_new_i64();
1491 tmp32_1
= tcg_temp_new_i32();
1492 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1493 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1494 store_reg32(r1
, tmp32_1
);
1495 set_cc_s32(s
, tmp32_1
);
1496 tcg_temp_free_i64(tmp2
);
1497 tcg_temp_free_i32(tmp32_1
);
1499 case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
1500 case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
1501 tmp2
= tcg_temp_new_i64();
1503 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1505 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1507 tcg_gen_mul_i64(regs
[r1
], regs
[r1
], tmp2
);
1508 tcg_temp_free_i64(tmp2
);
1510 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1511 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1512 tmp2
= tcg_temp_new_i64();
1514 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1516 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1518 tmp4
= load_reg(r1
+ 1);
1519 tmp3
= tcg_temp_new_i64();
1520 tcg_gen_div_i64(tmp3
, tmp4
, tmp2
);
1521 store_reg(r1
+ 1, tmp3
);
1522 tcg_gen_rem_i64(tmp3
, tmp4
, tmp2
);
1523 store_reg(r1
, tmp3
);
1524 tcg_temp_free_i64(tmp2
);
1525 tcg_temp_free_i64(tmp3
);
1526 tcg_temp_free_i64(tmp4
);
1528 case 0x8: /* AG R1,D2(X2,B2) [RXY] */
1529 case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
1530 case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
1531 case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
1533 tmp2
= tcg_temp_new_i64();
1534 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1535 } else if (op
== 0x18) {
1536 tmp2
= tcg_temp_new_i64();
1537 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1539 tmp2
= tcg_temp_new_i64();
1540 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1542 tmp4
= load_reg(r1
);
1543 tmp3
= tcg_temp_new_i64();
1544 tcg_gen_add_i64(tmp3
, tmp4
, tmp2
);
1545 store_reg(r1
, tmp3
);
1549 set_cc_add64(s
, tmp4
, tmp2
, tmp3
);
1553 set_cc_addu64(s
, tmp4
, tmp2
, tmp3
);
1558 tcg_temp_free_i64(tmp2
);
1559 tcg_temp_free_i64(tmp3
);
1560 tcg_temp_free_i64(tmp4
);
1562 case 0x9: /* SG R1,D2(X2,B2) [RXY] */
1563 case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
1564 case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
1565 case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
1566 tmp2
= tcg_temp_new_i64();
1568 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1569 } else if (op
== 0x1b) {
1570 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1572 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1574 tmp4
= load_reg(r1
);
1575 tmp3
= tcg_temp_new_i64();
1576 tcg_gen_sub_i64(tmp3
, tmp4
, tmp2
);
1577 store_reg(r1
, tmp3
);
1581 set_cc_sub64(s
, tmp4
, tmp2
, tmp3
);
1585 set_cc_subu64(s
, tmp4
, tmp2
, tmp3
);
1590 tcg_temp_free_i64(tmp2
);
1591 tcg_temp_free_i64(tmp3
);
1592 tcg_temp_free_i64(tmp4
);
1594 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1595 tmp2
= tcg_temp_new_i64();
1596 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1597 tcg_gen_bswap64_i64(tmp2
, tmp2
);
1598 store_reg(r1
, tmp2
);
1599 tcg_temp_free_i64(tmp2
);
1601 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1602 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1603 tmp2
= tcg_temp_new_i64();
1604 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1606 tcg_gen_ext32s_i64(tmp2
, tmp2
);
1608 store_reg(r1
, tmp2
);
1609 tcg_temp_free_i64(tmp2
);
1611 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1612 tmp2
= tcg_temp_new_i64();
1613 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1614 store_reg(r1
, tmp2
);
1615 tcg_temp_free_i64(tmp2
);
1617 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1618 tmp2
= tcg_temp_new_i64();
1619 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1620 tcg_gen_andi_i64(tmp2
, tmp2
, 0x7fffffffULL
);
1621 store_reg(r1
, tmp2
);
1622 tcg_temp_free_i64(tmp2
);
1624 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1625 tmp2
= tcg_temp_new_i64();
1626 tmp32_1
= tcg_temp_new_i32();
1627 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1628 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1629 tcg_temp_free_i64(tmp2
);
1630 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1631 store_reg32(r1
, tmp32_1
);
1632 tcg_temp_free_i32(tmp32_1
);
1634 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1635 tmp2
= tcg_temp_new_i64();
1636 tmp32_1
= tcg_temp_new_i32();
1637 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1638 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1639 tcg_temp_free_i64(tmp2
);
1640 tcg_gen_bswap16_i32(tmp32_1
, tmp32_1
);
1641 store_reg16(r1
, tmp32_1
);
1642 tcg_temp_free_i32(tmp32_1
);
1644 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1645 case 0x21: /* CLG R1,D2(X2,B2) */
1646 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1647 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1648 tmp2
= tcg_temp_new_i64();
1652 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1655 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1658 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1666 cmp_s64(s
, regs
[r1
], tmp2
);
1670 cmp_u64(s
, regs
[r1
], tmp2
);
1675 tcg_temp_free_i64(tmp2
);
1677 case 0x24: /* stg r1, d2(x2,b2) */
1678 tcg_gen_qemu_st64(regs
[r1
], addr
, get_mem_index(s
));
1680 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1681 tmp32_1
= load_reg32(r1
);
1682 tmp2
= tcg_temp_new_i64();
1683 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1684 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1685 tcg_temp_free_i32(tmp32_1
);
1686 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1687 tcg_temp_free_i64(tmp2
);
1689 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1690 tmp32_1
= load_reg32(r1
);
1691 tmp2
= tcg_temp_new_i64();
1692 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1693 tcg_temp_free_i32(tmp32_1
);
1694 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1695 tcg_temp_free_i64(tmp2
);
1697 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1698 tmp32_1
= load_reg32(r1
);
1699 tmp32_2
= tcg_temp_new_i32();
1700 tmp2
= tcg_temp_new_i64();
1701 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1702 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1703 tcg_temp_free_i64(tmp2
);
1704 tcg_gen_xor_i32(tmp32_2
, tmp32_1
, tmp32_2
);
1705 store_reg32(r1
, tmp32_2
);
1706 set_cc_nz_u32(s
, tmp32_2
);
1707 tcg_temp_free_i32(tmp32_1
);
1708 tcg_temp_free_i32(tmp32_2
);
1710 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1711 tmp3
= tcg_temp_new_i64();
1712 tcg_gen_qemu_ld32u(tmp3
, addr
, get_mem_index(s
));
1713 store_reg32_i64(r1
, tmp3
);
1714 tcg_temp_free_i64(tmp3
);
1716 case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
1717 case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
1718 tmp32_1
= load_reg32(r1
);
1719 tmp32_2
= tcg_temp_new_i32();
1720 tmp32_3
= tcg_temp_new_i32();
1721 tmp2
= tcg_temp_new_i64();
1722 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1723 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1724 tcg_temp_free_i64(tmp2
);
1727 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
1730 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
1735 store_reg32(r1
, tmp32_3
);
1738 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1741 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1746 tcg_temp_free_i32(tmp32_1
);
1747 tcg_temp_free_i32(tmp32_2
);
1748 tcg_temp_free_i32(tmp32_3
);
1750 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1751 store_reg(r1
, addr
);
1753 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1754 tmp32_1
= load_reg32(r1
);
1755 tmp2
= tcg_temp_new_i64();
1756 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
1757 tcg_gen_qemu_st8(tmp2
, addr
, get_mem_index(s
));
1758 tcg_temp_free_i32(tmp32_1
);
1759 tcg_temp_free_i64(tmp2
);
1761 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1762 tmp3
= tcg_temp_new_i64();
1763 tcg_gen_qemu_ld8u(tmp3
, addr
, get_mem_index(s
));
1764 store_reg8(r1
, tmp3
);
1765 tcg_temp_free_i64(tmp3
);
1767 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1768 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1769 tmp2
= tcg_temp_new_i64();
1770 tcg_gen_qemu_ld8s(tmp2
, addr
, get_mem_index(s
));
1773 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1774 store_reg32_i64(r1
, tmp2
);
1777 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1778 store_reg(r1
, tmp2
);
1783 tcg_temp_free_i64(tmp2
);
1785 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1786 tmp2
= tcg_temp_new_i64();
1787 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1788 store_reg32_i64(r1
, tmp2
);
1789 tcg_temp_free_i64(tmp2
);
1791 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1792 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1793 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1794 tmp3
= tcg_temp_new_i64();
1795 tcg_gen_qemu_ld64(tmp3
, addr
, get_mem_index(s
));
1798 tcg_gen_and_i64(regs
[r1
], regs
[r1
], tmp3
);
1801 tcg_gen_or_i64(regs
[r1
], regs
[r1
], tmp3
);
1804 tcg_gen_xor_i64(regs
[r1
], regs
[r1
], tmp3
);
1809 set_cc_nz_u64(s
, regs
[r1
]);
1810 tcg_temp_free_i64(tmp3
);
1812 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1813 tmp2
= tcg_temp_new_i64();
1814 tmp32_1
= tcg_const_i32(r1
);
1815 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1816 gen_helper_mlg(tmp32_1
, tmp2
);
1817 tcg_temp_free_i64(tmp2
);
1818 tcg_temp_free_i32(tmp32_1
);
1820 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1821 tmp2
= tcg_temp_new_i64();
1822 tmp32_1
= tcg_const_i32(r1
);
1823 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1824 gen_helper_dlg(tmp32_1
, tmp2
);
1825 tcg_temp_free_i64(tmp2
);
1826 tcg_temp_free_i32(tmp32_1
);
1828 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1829 tmp2
= tcg_temp_new_i64();
1830 tmp3
= tcg_temp_new_i64();
1831 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1832 /* XXX possible optimization point */
1834 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
1835 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
1836 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
1837 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
1838 tcg_gen_add_i64(tmp3
, regs
[r1
], tmp3
);
1839 store_reg(r1
, tmp3
);
1840 set_cc_addu64(s
, regs
[r1
], tmp2
, tmp3
);
1841 tcg_temp_free_i64(tmp2
);
1842 tcg_temp_free_i64(tmp3
);
1844 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1845 tmp2
= tcg_temp_new_i64();
1846 tmp32_1
= tcg_const_i32(r1
);
1847 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1848 /* XXX possible optimization point */
1850 gen_helper_slbg(cc_op
, cc_op
, tmp32_1
, regs
[r1
], tmp2
);
1852 tcg_temp_free_i64(tmp2
);
1853 tcg_temp_free_i32(tmp32_1
);
1855 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1856 tcg_gen_qemu_ld8u(regs
[r1
], addr
, get_mem_index(s
));
1858 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1859 tcg_gen_qemu_ld16u(regs
[r1
], addr
, get_mem_index(s
));
1861 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1862 tmp2
= tcg_temp_new_i64();
1863 tcg_gen_qemu_ld8u(tmp2
, addr
, get_mem_index(s
));
1864 store_reg32_i64(r1
, tmp2
);
1865 tcg_temp_free_i64(tmp2
);
1867 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1868 tmp2
= tcg_temp_new_i64();
1869 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1870 store_reg32_i64(r1
, tmp2
);
1871 tcg_temp_free_i64(tmp2
);
1873 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1874 tmp2
= tcg_temp_new_i64();
1875 tmp3
= load_reg((r1
+ 1) & 15);
1876 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1877 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1878 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
1879 store_reg32_i64((r1
+ 1) & 15, tmp2
);
1880 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
1881 store_reg32_i64(r1
, tmp2
);
1882 tcg_temp_free_i64(tmp2
);
1883 tcg_temp_free_i64(tmp3
);
1885 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1886 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1887 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1889 tmp2
= tcg_temp_new_i64();
1890 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1891 tmp3
= load_reg((r1
+ 1) & 15);
1892 tcg_gen_ext32u_i64(tmp2
, tmp2
);
1893 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1894 tcg_gen_shli_i64(tmp
, tmp
, 32);
1895 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
1897 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
1898 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
1899 store_reg32_i64((r1
+ 1) & 15, tmp
);
1900 store_reg32_i64(r1
, tmp3
);
1901 tcg_temp_free_i64(tmp
);
1902 tcg_temp_free_i64(tmp2
);
1903 tcg_temp_free_i64(tmp3
);
1905 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1906 tmp2
= tcg_temp_new_i64();
1907 tmp32_1
= load_reg32(r1
);
1908 tmp32_2
= tcg_temp_new_i32();
1909 tmp32_3
= tcg_temp_new_i32();
1910 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1911 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1912 /* XXX possible optimization point */
1914 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
1915 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1916 store_reg32(r1
, tmp32_3
);
1917 tcg_temp_free_i64(tmp2
);
1918 tcg_temp_free_i32(tmp32_1
);
1919 tcg_temp_free_i32(tmp32_2
);
1920 tcg_temp_free_i32(tmp32_3
);
1922 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1923 tmp2
= tcg_temp_new_i64();
1924 tmp32_1
= tcg_const_i32(r1
);
1925 tmp32_2
= tcg_temp_new_i32();
1926 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1927 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1928 /* XXX possible optimization point */
1930 gen_helper_slb(cc_op
, cc_op
, tmp32_1
, tmp32_2
);
1932 tcg_temp_free_i64(tmp2
);
1933 tcg_temp_free_i32(tmp32_1
);
1934 tcg_temp_free_i32(tmp32_2
);
1937 LOG_DISAS("illegal e3 operation 0x%x\n", op
);
1938 gen_illegal_opcode(s
, 3);
1941 tcg_temp_free_i64(addr
);
1944 #ifndef CONFIG_USER_ONLY
1945 static void disas_e5(DisasContext
* s
, uint64_t insn
)
1948 int op
= (insn
>> 32) & 0xff;
1950 tmp
= get_address(s
, 0, (insn
>> 28) & 0xf, (insn
>> 16) & 0xfff);
1951 tmp2
= get_address(s
, 0, (insn
>> 12) & 0xf, insn
& 0xfff);
1953 LOG_DISAS("disas_e5: insn %" PRIx64
"\n", insn
);
1955 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1956 /* Test Protection */
1957 potential_page_fault(s
);
1958 gen_helper_tprot(cc_op
, tmp
, tmp2
);
1962 LOG_DISAS("illegal e5 operation 0x%x\n", op
);
1963 gen_illegal_opcode(s
, 3);
1967 tcg_temp_free_i64(tmp
);
1968 tcg_temp_free_i64(tmp2
);
1972 static void disas_eb(DisasContext
*s
, int op
, int r1
, int r3
, int b2
, int d2
)
1974 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
1975 TCGv_i32 tmp32_1
, tmp32_2
;
1979 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1980 op
, r1
, r3
, b2
, d2
);
1982 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1983 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1984 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1985 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1986 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1988 tmp
= get_address(s
, 0, b2
, d2
);
1989 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1991 tmp
= tcg_const_i64(d2
& 0x3f);
1995 tcg_gen_shr_i64(regs
[r1
], regs
[r3
], tmp
);
1998 tcg_gen_shl_i64(regs
[r1
], regs
[r3
], tmp
);
2001 tcg_gen_sar_i64(regs
[r1
], regs
[r3
], tmp
);
2004 tmp2
= tcg_temp_new_i64();
2005 tmp3
= tcg_temp_new_i64();
2006 gen_op_update2_cc_i64(s
, CC_OP_SLAG
, regs
[r3
], tmp
);
2007 tcg_gen_shl_i64(tmp2
, regs
[r3
], tmp
);
2008 /* override sign bit with source sign */
2009 tcg_gen_andi_i64(tmp2
, tmp2
, ~0x8000000000000000ULL
);
2010 tcg_gen_andi_i64(tmp3
, regs
[r3
], 0x8000000000000000ULL
);
2011 tcg_gen_or_i64(regs
[r1
], tmp2
, tmp3
);
2012 tcg_temp_free_i64(tmp2
);
2013 tcg_temp_free_i64(tmp3
);
2016 tcg_gen_rotl_i64(regs
[r1
], regs
[r3
], tmp
);
2023 set_cc_s64(s
, regs
[r1
]);
2025 tcg_temp_free_i64(tmp
);
2027 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
2029 tmp
= get_address(s
, 0, b2
, d2
);
2030 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
2032 tmp
= tcg_const_i64(d2
& 0x3f);
2034 tmp32_1
= tcg_temp_new_i32();
2035 tmp32_2
= load_reg32(r3
);
2036 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
2039 tcg_gen_rotl_i32(tmp32_1
, tmp32_2
, tmp32_1
);
2045 store_reg32(r1
, tmp32_1
);
2046 tcg_temp_free_i64(tmp
);
2047 tcg_temp_free_i32(tmp32_1
);
2048 tcg_temp_free_i32(tmp32_2
);
2050 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
2051 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
2054 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
2055 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
2058 /* Apparently, unrolling lmg/stmg of any size gains performance -
2059 even for very long ones... */
2060 tmp
= get_address(s
, 0, b2
, d2
);
2061 tmp3
= tcg_const_i64(stm_len
);
2062 tmp4
= tcg_const_i64(op
== 0x26 ? 32 : 4);
2063 for (i
= r1
;; i
= (i
+ 1) % 16) {
2066 tcg_gen_qemu_ld64(regs
[i
], tmp
, get_mem_index(s
));
2069 tmp2
= tcg_temp_new_i64();
2070 #if HOST_LONG_BITS == 32
2071 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2072 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs
[i
]), tmp2
);
2074 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2075 tcg_gen_shl_i64(tmp2
, tmp2
, tmp4
);
2076 tcg_gen_ext32u_i64(regs
[i
], regs
[i
]);
2077 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
2079 tcg_temp_free_i64(tmp2
);
2082 tcg_gen_qemu_st64(regs
[i
], tmp
, get_mem_index(s
));
2085 tmp2
= tcg_temp_new_i64();
2086 tcg_gen_shr_i64(tmp2
, regs
[i
], tmp4
);
2087 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2088 tcg_temp_free_i64(tmp2
);
2096 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
2098 tcg_temp_free_i64(tmp
);
2099 tcg_temp_free_i64(tmp3
);
2100 tcg_temp_free_i64(tmp4
);
2102 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
2103 tmp
= get_address(s
, 0, b2
, d2
);
2104 tmp32_1
= tcg_const_i32(r1
);
2105 tmp32_2
= tcg_const_i32(r3
);
2106 potential_page_fault(s
);
2107 gen_helper_stcmh(tmp32_1
, tmp
, tmp32_2
);
2108 tcg_temp_free_i64(tmp
);
2109 tcg_temp_free_i32(tmp32_1
);
2110 tcg_temp_free_i32(tmp32_2
);
2112 #ifndef CONFIG_USER_ONLY
2113 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
2115 check_privileged(s
, ilc
);
2116 tmp
= get_address(s
, 0, b2
, d2
);
2117 tmp32_1
= tcg_const_i32(r1
);
2118 tmp32_2
= tcg_const_i32(r3
);
2119 potential_page_fault(s
);
2120 gen_helper_lctlg(tmp32_1
, tmp
, tmp32_2
);
2121 tcg_temp_free_i64(tmp
);
2122 tcg_temp_free_i32(tmp32_1
);
2123 tcg_temp_free_i32(tmp32_2
);
2125 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
2127 check_privileged(s
, ilc
);
2128 tmp
= get_address(s
, 0, b2
, d2
);
2129 tmp32_1
= tcg_const_i32(r1
);
2130 tmp32_2
= tcg_const_i32(r3
);
2131 potential_page_fault(s
);
2132 gen_helper_stctg(tmp32_1
, tmp
, tmp32_2
);
2133 tcg_temp_free_i64(tmp
);
2134 tcg_temp_free_i32(tmp32_1
);
2135 tcg_temp_free_i32(tmp32_2
);
2138 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
2139 tmp
= get_address(s
, 0, b2
, d2
);
2140 tmp32_1
= tcg_const_i32(r1
);
2141 tmp32_2
= tcg_const_i32(r3
);
2142 potential_page_fault(s
);
2143 /* XXX rewrite in tcg */
2144 gen_helper_csg(cc_op
, tmp32_1
, tmp
, tmp32_2
);
2146 tcg_temp_free_i64(tmp
);
2147 tcg_temp_free_i32(tmp32_1
);
2148 tcg_temp_free_i32(tmp32_2
);
2150 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
2151 tmp
= get_address(s
, 0, b2
, d2
);
2152 tmp32_1
= tcg_const_i32(r1
);
2153 tmp32_2
= tcg_const_i32(r3
);
2154 potential_page_fault(s
);
2155 /* XXX rewrite in tcg */
2156 gen_helper_cdsg(cc_op
, tmp32_1
, tmp
, tmp32_2
);
2158 tcg_temp_free_i64(tmp
);
2159 tcg_temp_free_i32(tmp32_1
);
2160 tcg_temp_free_i32(tmp32_2
);
2162 case 0x51: /* TMY D1(B1),I2 [SIY] */
2163 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
2164 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
2165 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
2166 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
2167 that incurs less conversions */
2168 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
2169 tcg_temp_free_i64(tmp
);
2170 tcg_temp_free_i64(tmp2
);
2172 case 0x52: /* MVIY D1(B1),I2 [SIY] */
2173 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
2174 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
2175 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
2176 tcg_temp_free_i64(tmp
);
2177 tcg_temp_free_i64(tmp2
);
2179 case 0x55: /* CLIY D1(B1),I2 [SIY] */
2180 tmp3
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the 1st operand */
2181 tmp
= tcg_temp_new_i64();
2182 tmp32_1
= tcg_temp_new_i32();
2183 tcg_gen_qemu_ld8u(tmp
, tmp3
, get_mem_index(s
));
2184 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
2185 cmp_u32c(s
, tmp32_1
, (r1
<< 4) | r3
);
2186 tcg_temp_free_i64(tmp
);
2187 tcg_temp_free_i64(tmp3
);
2188 tcg_temp_free_i32(tmp32_1
);
2190 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
2191 tmp
= get_address(s
, 0, b2
, d2
);
2192 tmp32_1
= tcg_const_i32(r1
);
2193 tmp32_2
= tcg_const_i32(r3
);
2194 potential_page_fault(s
);
2195 /* XXX split CC calculation out */
2196 gen_helper_icmh(cc_op
, tmp32_1
, tmp
, tmp32_2
);
2198 tcg_temp_free_i64(tmp
);
2199 tcg_temp_free_i32(tmp32_1
);
2200 tcg_temp_free_i32(tmp32_2
);
2203 LOG_DISAS("illegal eb operation 0x%x\n", op
);
2204 gen_illegal_opcode(s
, ilc
);
2209 static void disas_ed(DisasContext
*s
, int op
, int r1
, int x2
, int b2
, int d2
,
2212 TCGv_i32 tmp_r1
, tmp32
;
2214 addr
= get_address(s
, x2
, b2
, d2
);
2215 tmp_r1
= tcg_const_i32(r1
);
2217 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2218 potential_page_fault(s
);
2219 gen_helper_lxdb(tmp_r1
, addr
);
2221 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2222 tmp
= tcg_temp_new_i64();
2223 tmp32
= load_freg32(r1
);
2224 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2225 set_cc_cmp_f32_i64(s
, tmp32
, tmp
);
2226 tcg_temp_free_i64(tmp
);
2227 tcg_temp_free_i32(tmp32
);
2229 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2230 tmp
= tcg_temp_new_i64();
2231 tmp32
= tcg_temp_new_i32();
2232 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2233 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2234 gen_helper_aeb(tmp_r1
, tmp32
);
2235 tcg_temp_free_i64(tmp
);
2236 tcg_temp_free_i32(tmp32
);
2238 tmp32
= load_freg32(r1
);
2239 set_cc_nz_f32(s
, tmp32
);
2240 tcg_temp_free_i32(tmp32
);
2242 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2243 tmp
= tcg_temp_new_i64();
2244 tmp32
= tcg_temp_new_i32();
2245 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2246 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2247 gen_helper_seb(tmp_r1
, tmp32
);
2248 tcg_temp_free_i64(tmp
);
2249 tcg_temp_free_i32(tmp32
);
2251 tmp32
= load_freg32(r1
);
2252 set_cc_nz_f32(s
, tmp32
);
2253 tcg_temp_free_i32(tmp32
);
2255 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2256 tmp
= tcg_temp_new_i64();
2257 tmp32
= tcg_temp_new_i32();
2258 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2259 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2260 gen_helper_deb(tmp_r1
, tmp32
);
2261 tcg_temp_free_i64(tmp
);
2262 tcg_temp_free_i32(tmp32
);
2264 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2265 potential_page_fault(s
);
2266 gen_helper_tceb(cc_op
, tmp_r1
, addr
);
2269 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2270 potential_page_fault(s
);
2271 gen_helper_tcdb(cc_op
, tmp_r1
, addr
);
2274 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2275 potential_page_fault(s
);
2276 gen_helper_tcxb(cc_op
, tmp_r1
, addr
);
2279 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2280 tmp
= tcg_temp_new_i64();
2281 tmp32
= tcg_temp_new_i32();
2282 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2283 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2284 gen_helper_meeb(tmp_r1
, tmp32
);
2285 tcg_temp_free_i64(tmp
);
2286 tcg_temp_free_i32(tmp32
);
2288 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2289 potential_page_fault(s
);
2290 gen_helper_cdb(cc_op
, tmp_r1
, addr
);
2293 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2294 potential_page_fault(s
);
2295 gen_helper_adb(cc_op
, tmp_r1
, addr
);
2298 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2299 potential_page_fault(s
);
2300 gen_helper_sdb(cc_op
, tmp_r1
, addr
);
2303 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2304 potential_page_fault(s
);
2305 gen_helper_mdb(tmp_r1
, addr
);
2307 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2308 potential_page_fault(s
);
2309 gen_helper_ddb(tmp_r1
, addr
);
2311 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2312 /* for RXF insns, r1 is R3 and r1b is R1 */
2313 tmp32
= tcg_const_i32(r1b
);
2314 potential_page_fault(s
);
2315 gen_helper_madb(tmp32
, addr
, tmp_r1
);
2316 tcg_temp_free_i32(tmp32
);
2319 LOG_DISAS("illegal ed operation 0x%x\n", op
);
2320 gen_illegal_opcode(s
, 3);
2323 tcg_temp_free_i32(tmp_r1
);
2324 tcg_temp_free_i64(addr
);
2327 static void disas_a5(DisasContext
*s
, int op
, int r1
, int i2
)
2331 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2333 case 0x0: /* IIHH R1,I2 [RI] */
2334 tmp
= tcg_const_i64(i2
);
2335 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 48, 16);
2336 tcg_temp_free_i64(tmp
);
2338 case 0x1: /* IIHL R1,I2 [RI] */
2339 tmp
= tcg_const_i64(i2
);
2340 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 32, 16);
2341 tcg_temp_free_i64(tmp
);
2343 case 0x2: /* IILH R1,I2 [RI] */
2344 tmp
= tcg_const_i64(i2
);
2345 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 16, 16);
2346 tcg_temp_free_i64(tmp
);
2348 case 0x3: /* IILL R1,I2 [RI] */
2349 tmp
= tcg_const_i64(i2
);
2350 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 0, 16);
2351 tcg_temp_free_i64(tmp
);
2353 case 0x4: /* NIHH R1,I2 [RI] */
2354 case 0x8: /* OIHH R1,I2 [RI] */
2356 tmp32
= tcg_temp_new_i32();
2359 tmp2
= tcg_const_i64((((uint64_t)i2
) << 48)
2360 | 0x0000ffffffffffffULL
);
2361 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2364 tmp2
= tcg_const_i64(((uint64_t)i2
) << 48);
2365 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2371 tcg_gen_shri_i64(tmp2
, tmp
, 48);
2372 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2373 set_cc_nz_u32(s
, tmp32
);
2374 tcg_temp_free_i64(tmp2
);
2375 tcg_temp_free_i32(tmp32
);
2376 tcg_temp_free_i64(tmp
);
2378 case 0x5: /* NIHL R1,I2 [RI] */
2379 case 0x9: /* OIHL R1,I2 [RI] */
2381 tmp32
= tcg_temp_new_i32();
2384 tmp2
= tcg_const_i64((((uint64_t)i2
) << 32)
2385 | 0xffff0000ffffffffULL
);
2386 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2389 tmp2
= tcg_const_i64(((uint64_t)i2
) << 32);
2390 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2396 tcg_gen_shri_i64(tmp2
, tmp
, 32);
2397 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2398 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2399 set_cc_nz_u32(s
, tmp32
);
2400 tcg_temp_free_i64(tmp2
);
2401 tcg_temp_free_i32(tmp32
);
2402 tcg_temp_free_i64(tmp
);
2404 case 0x6: /* NILH R1,I2 [RI] */
2405 case 0xa: /* OILH R1,I2 [RI] */
2407 tmp32
= tcg_temp_new_i32();
2410 tmp2
= tcg_const_i64((((uint64_t)i2
) << 16)
2411 | 0xffffffff0000ffffULL
);
2412 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2415 tmp2
= tcg_const_i64(((uint64_t)i2
) << 16);
2416 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2422 tcg_gen_shri_i64(tmp
, tmp
, 16);
2423 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2424 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2425 set_cc_nz_u32(s
, tmp32
);
2426 tcg_temp_free_i64(tmp2
);
2427 tcg_temp_free_i32(tmp32
);
2428 tcg_temp_free_i64(tmp
);
2430 case 0x7: /* NILL R1,I2 [RI] */
2431 case 0xb: /* OILL R1,I2 [RI] */
2433 tmp32
= tcg_temp_new_i32();
2436 tmp2
= tcg_const_i64(i2
| 0xffffffffffff0000ULL
);
2437 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2440 tmp2
= tcg_const_i64(i2
);
2441 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2447 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2448 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2449 set_cc_nz_u32(s
, tmp32
); /* signedness should not matter here */
2450 tcg_temp_free_i64(tmp2
);
2451 tcg_temp_free_i32(tmp32
);
2452 tcg_temp_free_i64(tmp
);
2454 case 0xc: /* LLIHH R1,I2 [RI] */
2455 tmp
= tcg_const_i64( ((uint64_t)i2
) << 48 );
2457 tcg_temp_free_i64(tmp
);
2459 case 0xd: /* LLIHL R1,I2 [RI] */
2460 tmp
= tcg_const_i64( ((uint64_t)i2
) << 32 );
2462 tcg_temp_free_i64(tmp
);
2464 case 0xe: /* LLILH R1,I2 [RI] */
2465 tmp
= tcg_const_i64( ((uint64_t)i2
) << 16 );
2467 tcg_temp_free_i64(tmp
);
2469 case 0xf: /* LLILL R1,I2 [RI] */
2470 tmp
= tcg_const_i64(i2
);
2472 tcg_temp_free_i64(tmp
);
2475 LOG_DISAS("illegal a5 operation 0x%x\n", op
);
2476 gen_illegal_opcode(s
, 2);
2481 static void disas_a7(DisasContext
*s
, int op
, int r1
, int i2
)
2484 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2487 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2489 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2490 case 0x1: /* TMLL or TML R1,I2 [RI] */
2491 case 0x2: /* TMHH R1,I2 [RI] */
2492 case 0x3: /* TMHL R1,I2 [RI] */
2494 tmp2
= tcg_const_i64((uint16_t)i2
);
2497 tcg_gen_shri_i64(tmp
, tmp
, 16);
2502 tcg_gen_shri_i64(tmp
, tmp
, 48);
2505 tcg_gen_shri_i64(tmp
, tmp
, 32);
2508 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
2509 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_64
);
2510 tcg_temp_free_i64(tmp
);
2511 tcg_temp_free_i64(tmp2
);
2513 case 0x4: /* brc m1, i2 */
2514 gen_brc(r1
, s
, i2
* 2LL);
2516 case 0x5: /* BRAS R1,I2 [RI] */
2517 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
2519 tcg_temp_free_i64(tmp
);
2520 gen_goto_tb(s
, 0, s
->pc
+ i2
* 2LL);
2521 s
->is_jmp
= DISAS_TB_JUMP
;
2523 case 0x6: /* BRCT R1,I2 [RI] */
2524 tmp32_1
= load_reg32(r1
);
2525 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
2526 store_reg32(r1
, tmp32_1
);
2527 gen_update_cc_op(s
);
2528 l1
= gen_new_label();
2529 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
2530 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2532 gen_goto_tb(s
, 1, s
->pc
+ 4);
2533 s
->is_jmp
= DISAS_TB_JUMP
;
2534 tcg_temp_free_i32(tmp32_1
);
2536 case 0x7: /* BRCTG R1,I2 [RI] */
2538 tcg_gen_subi_i64(tmp
, tmp
, 1);
2540 gen_update_cc_op(s
);
2541 l1
= gen_new_label();
2542 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp
, 0, l1
);
2543 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2545 gen_goto_tb(s
, 1, s
->pc
+ 4);
2546 s
->is_jmp
= DISAS_TB_JUMP
;
2547 tcg_temp_free_i64(tmp
);
2549 case 0x8: /* lhi r1, i2 */
2550 tmp32_1
= tcg_const_i32(i2
);
2551 store_reg32(r1
, tmp32_1
);
2552 tcg_temp_free_i32(tmp32_1
);
2554 case 0x9: /* lghi r1, i2 */
2555 tmp
= tcg_const_i64(i2
);
2557 tcg_temp_free_i64(tmp
);
2559 case 0xa: /* AHI R1,I2 [RI] */
2560 tmp32_1
= load_reg32(r1
);
2561 tmp32_2
= tcg_temp_new_i32();
2562 tmp32_3
= tcg_const_i32(i2
);
2565 tcg_gen_subi_i32(tmp32_2
, tmp32_1
, -i2
);
2567 tcg_gen_add_i32(tmp32_2
, tmp32_1
, tmp32_3
);
2570 store_reg32(r1
, tmp32_2
);
2571 set_cc_add32(s
, tmp32_1
, tmp32_3
, tmp32_2
);
2572 tcg_temp_free_i32(tmp32_1
);
2573 tcg_temp_free_i32(tmp32_2
);
2574 tcg_temp_free_i32(tmp32_3
);
2576 case 0xb: /* aghi r1, i2 */
2578 tmp2
= tcg_const_i64(i2
);
2581 tcg_gen_subi_i64(regs
[r1
], tmp
, -i2
);
2583 tcg_gen_add_i64(regs
[r1
], tmp
, tmp2
);
2585 set_cc_add64(s
, tmp
, tmp2
, regs
[r1
]);
2586 tcg_temp_free_i64(tmp
);
2587 tcg_temp_free_i64(tmp2
);
2589 case 0xc: /* MHI R1,I2 [RI] */
2590 tmp32_1
= load_reg32(r1
);
2591 tcg_gen_muli_i32(tmp32_1
, tmp32_1
, i2
);
2592 store_reg32(r1
, tmp32_1
);
2593 tcg_temp_free_i32(tmp32_1
);
2595 case 0xd: /* MGHI R1,I2 [RI] */
2597 tcg_gen_muli_i64(tmp
, tmp
, i2
);
2599 tcg_temp_free_i64(tmp
);
2601 case 0xe: /* CHI R1,I2 [RI] */
2602 tmp32_1
= load_reg32(r1
);
2603 cmp_s32c(s
, tmp32_1
, i2
);
2604 tcg_temp_free_i32(tmp32_1
);
2606 case 0xf: /* CGHI R1,I2 [RI] */
2608 cmp_s64c(s
, tmp
, i2
);
2609 tcg_temp_free_i64(tmp
);
2612 LOG_DISAS("illegal a7 operation 0x%x\n", op
);
2613 gen_illegal_opcode(s
, 2);
2618 static void disas_b2(DisasContext
*s
, int op
, uint32_t insn
)
2620 TCGv_i64 tmp
, tmp2
, tmp3
;
2621 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2624 #ifndef CONFIG_USER_ONLY
2628 r1
= (insn
>> 4) & 0xf;
2631 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
2634 case 0x22: /* IPM R1 [RRE] */
2635 tmp32_1
= tcg_const_i32(r1
);
2637 gen_helper_ipm(cc_op
, tmp32_1
);
2638 tcg_temp_free_i32(tmp32_1
);
2640 case 0x41: /* CKSM R1,R2 [RRE] */
2641 tmp32_1
= tcg_const_i32(r1
);
2642 tmp32_2
= tcg_const_i32(r2
);
2643 potential_page_fault(s
);
2644 gen_helper_cksm(tmp32_1
, tmp32_2
);
2645 tcg_temp_free_i32(tmp32_1
);
2646 tcg_temp_free_i32(tmp32_2
);
2647 gen_op_movi_cc(s
, 0);
2649 case 0x4e: /* SAR R1,R2 [RRE] */
2650 tmp32_1
= load_reg32(r2
);
2651 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, aregs
[r1
]));
2652 tcg_temp_free_i32(tmp32_1
);
2654 case 0x4f: /* EAR R1,R2 [RRE] */
2655 tmp32_1
= tcg_temp_new_i32();
2656 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, aregs
[r2
]));
2657 store_reg32(r1
, tmp32_1
);
2658 tcg_temp_free_i32(tmp32_1
);
2660 case 0x52: /* MSR R1,R2 [RRE] */
2661 tmp32_1
= load_reg32(r1
);
2662 tmp32_2
= load_reg32(r2
);
2663 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
2664 store_reg32(r1
, tmp32_1
);
2665 tcg_temp_free_i32(tmp32_1
);
2666 tcg_temp_free_i32(tmp32_2
);
2668 case 0x54: /* MVPG R1,R2 [RRE] */
2670 tmp2
= load_reg(r1
);
2671 tmp3
= load_reg(r2
);
2672 potential_page_fault(s
);
2673 gen_helper_mvpg(tmp
, tmp2
, tmp3
);
2674 tcg_temp_free_i64(tmp
);
2675 tcg_temp_free_i64(tmp2
);
2676 tcg_temp_free_i64(tmp3
);
2677 /* XXX check CCO bit and set CC accordingly */
2678 gen_op_movi_cc(s
, 0);
2680 case 0x55: /* MVST R1,R2 [RRE] */
2681 tmp32_1
= load_reg32(0);
2682 tmp32_2
= tcg_const_i32(r1
);
2683 tmp32_3
= tcg_const_i32(r2
);
2684 potential_page_fault(s
);
2685 gen_helper_mvst(tmp32_1
, tmp32_2
, tmp32_3
);
2686 tcg_temp_free_i32(tmp32_1
);
2687 tcg_temp_free_i32(tmp32_2
);
2688 tcg_temp_free_i32(tmp32_3
);
2689 gen_op_movi_cc(s
, 1);
2691 case 0x5d: /* CLST R1,R2 [RRE] */
2692 tmp32_1
= load_reg32(0);
2693 tmp32_2
= tcg_const_i32(r1
);
2694 tmp32_3
= tcg_const_i32(r2
);
2695 potential_page_fault(s
);
2696 gen_helper_clst(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
2698 tcg_temp_free_i32(tmp32_1
);
2699 tcg_temp_free_i32(tmp32_2
);
2700 tcg_temp_free_i32(tmp32_3
);
2702 case 0x5e: /* SRST R1,R2 [RRE] */
2703 tmp32_1
= load_reg32(0);
2704 tmp32_2
= tcg_const_i32(r1
);
2705 tmp32_3
= tcg_const_i32(r2
);
2706 potential_page_fault(s
);
2707 gen_helper_srst(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
2709 tcg_temp_free_i32(tmp32_1
);
2710 tcg_temp_free_i32(tmp32_2
);
2711 tcg_temp_free_i32(tmp32_3
);
2714 #ifndef CONFIG_USER_ONLY
2715 case 0x02: /* STIDP D2(B2) [S] */
2717 check_privileged(s
, ilc
);
2718 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2719 tmp
= get_address(s
, 0, b2
, d2
);
2720 potential_page_fault(s
);
2721 gen_helper_stidp(tmp
);
2722 tcg_temp_free_i64(tmp
);
2724 case 0x04: /* SCK D2(B2) [S] */
2726 check_privileged(s
, ilc
);
2727 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2728 tmp
= get_address(s
, 0, b2
, d2
);
2729 potential_page_fault(s
);
2730 gen_helper_sck(cc_op
, tmp
);
2732 tcg_temp_free_i64(tmp
);
2734 case 0x05: /* STCK D2(B2) [S] */
2736 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2737 tmp
= get_address(s
, 0, b2
, d2
);
2738 potential_page_fault(s
);
2739 gen_helper_stck(cc_op
, tmp
);
2741 tcg_temp_free_i64(tmp
);
2743 case 0x06: /* SCKC D2(B2) [S] */
2744 /* Set Clock Comparator */
2745 check_privileged(s
, ilc
);
2746 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2747 tmp
= get_address(s
, 0, b2
, d2
);
2748 potential_page_fault(s
);
2749 gen_helper_sckc(tmp
);
2750 tcg_temp_free_i64(tmp
);
2752 case 0x07: /* STCKC D2(B2) [S] */
2753 /* Store Clock Comparator */
2754 check_privileged(s
, ilc
);
2755 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2756 tmp
= get_address(s
, 0, b2
, d2
);
2757 potential_page_fault(s
);
2758 gen_helper_stckc(tmp
);
2759 tcg_temp_free_i64(tmp
);
2761 case 0x08: /* SPT D2(B2) [S] */
2763 check_privileged(s
, ilc
);
2764 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2765 tmp
= get_address(s
, 0, b2
, d2
);
2766 potential_page_fault(s
);
2767 gen_helper_spt(tmp
);
2768 tcg_temp_free_i64(tmp
);
2770 case 0x09: /* STPT D2(B2) [S] */
2771 /* Store CPU Timer */
2772 check_privileged(s
, ilc
);
2773 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2774 tmp
= get_address(s
, 0, b2
, d2
);
2775 potential_page_fault(s
);
2776 gen_helper_stpt(tmp
);
2777 tcg_temp_free_i64(tmp
);
2779 case 0x0a: /* SPKA D2(B2) [S] */
2780 /* Set PSW Key from Address */
2781 check_privileged(s
, ilc
);
2782 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2783 tmp
= get_address(s
, 0, b2
, d2
);
2784 tmp2
= tcg_temp_new_i64();
2785 tcg_gen_andi_i64(tmp2
, psw_mask
, ~PSW_MASK_KEY
);
2786 tcg_gen_shli_i64(tmp
, tmp
, PSW_SHIFT_KEY
- 4);
2787 tcg_gen_or_i64(psw_mask
, tmp2
, tmp
);
2788 tcg_temp_free_i64(tmp2
);
2789 tcg_temp_free_i64(tmp
);
2791 case 0x0d: /* PTLB [S] */
2793 check_privileged(s
, ilc
);
2796 case 0x10: /* SPX D2(B2) [S] */
2797 /* Set Prefix Register */
2798 check_privileged(s
, ilc
);
2799 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2800 tmp
= get_address(s
, 0, b2
, d2
);
2801 potential_page_fault(s
);
2802 gen_helper_spx(tmp
);
2803 tcg_temp_free_i64(tmp
);
2805 case 0x11: /* STPX D2(B2) [S] */
2807 check_privileged(s
, ilc
);
2808 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2809 tmp
= get_address(s
, 0, b2
, d2
);
2810 tmp2
= tcg_temp_new_i64();
2811 tcg_gen_ld_i64(tmp2
, cpu_env
, offsetof(CPUState
, psa
));
2812 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2813 tcg_temp_free_i64(tmp
);
2814 tcg_temp_free_i64(tmp2
);
2816 case 0x12: /* STAP D2(B2) [S] */
2817 /* Store CPU Address */
2818 check_privileged(s
, ilc
);
2819 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2820 tmp
= get_address(s
, 0, b2
, d2
);
2821 tmp2
= tcg_temp_new_i64();
2822 tmp32_1
= tcg_temp_new_i32();
2823 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, cpu_num
));
2824 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
2825 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2826 tcg_temp_free_i64(tmp
);
2827 tcg_temp_free_i64(tmp2
);
2828 tcg_temp_free_i32(tmp32_1
);
2830 case 0x21: /* IPTE R1,R2 [RRE] */
2831 /* Invalidate PTE */
2832 check_privileged(s
, ilc
);
2833 r1
= (insn
>> 4) & 0xf;
2836 tmp2
= load_reg(r2
);
2837 gen_helper_ipte(tmp
, tmp2
);
2838 tcg_temp_free_i64(tmp
);
2839 tcg_temp_free_i64(tmp2
);
2841 case 0x29: /* ISKE R1,R2 [RRE] */
2842 /* Insert Storage Key Extended */
2843 check_privileged(s
, ilc
);
2844 r1
= (insn
>> 4) & 0xf;
2847 tmp2
= tcg_temp_new_i64();
2848 gen_helper_iske(tmp2
, tmp
);
2849 store_reg(r1
, tmp2
);
2850 tcg_temp_free_i64(tmp
);
2851 tcg_temp_free_i64(tmp2
);
2853 case 0x2a: /* RRBE R1,R2 [RRE] */
2854 /* Set Storage Key Extended */
2855 check_privileged(s
, ilc
);
2856 r1
= (insn
>> 4) & 0xf;
2858 tmp32_1
= load_reg32(r1
);
2860 gen_helper_rrbe(cc_op
, tmp32_1
, tmp
);
2862 tcg_temp_free_i32(tmp32_1
);
2863 tcg_temp_free_i64(tmp
);
2865 case 0x2b: /* SSKE R1,R2 [RRE] */
2866 /* Set Storage Key Extended */
2867 check_privileged(s
, ilc
);
2868 r1
= (insn
>> 4) & 0xf;
2870 tmp32_1
= load_reg32(r1
);
2872 gen_helper_sske(tmp32_1
, tmp
);
2873 tcg_temp_free_i32(tmp32_1
);
2874 tcg_temp_free_i64(tmp
);
2876 case 0x34: /* STCH ? */
2877 /* Store Subchannel */
2878 check_privileged(s
, ilc
);
2879 gen_op_movi_cc(s
, 3);
2881 case 0x46: /* STURA R1,R2 [RRE] */
2882 /* Store Using Real Address */
2883 check_privileged(s
, ilc
);
2884 r1
= (insn
>> 4) & 0xf;
2886 tmp32_1
= load_reg32(r1
);
2888 potential_page_fault(s
);
2889 gen_helper_stura(tmp
, tmp32_1
);
2890 tcg_temp_free_i32(tmp32_1
);
2891 tcg_temp_free_i64(tmp
);
2893 case 0x50: /* CSP R1,R2 [RRE] */
2894 /* Compare And Swap And Purge */
2895 check_privileged(s
, ilc
);
2896 r1
= (insn
>> 4) & 0xf;
2898 tmp32_1
= tcg_const_i32(r1
);
2899 tmp32_2
= tcg_const_i32(r2
);
2900 gen_helper_csp(cc_op
, tmp32_1
, tmp32_2
);
2902 tcg_temp_free_i32(tmp32_1
);
2903 tcg_temp_free_i32(tmp32_2
);
2905 case 0x5f: /* CHSC ? */
2906 /* Channel Subsystem Call */
2907 check_privileged(s
, ilc
);
2908 gen_op_movi_cc(s
, 3);
2910 case 0x78: /* STCKE D2(B2) [S] */
2911 /* Store Clock Extended */
2912 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2913 tmp
= get_address(s
, 0, b2
, d2
);
2914 potential_page_fault(s
);
2915 gen_helper_stcke(cc_op
, tmp
);
2917 tcg_temp_free_i64(tmp
);
2919 case 0x79: /* SACF D2(B2) [S] */
2920 /* Store Clock Extended */
2921 check_privileged(s
, ilc
);
2922 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2923 tmp
= get_address(s
, 0, b2
, d2
);
2924 potential_page_fault(s
);
2925 gen_helper_sacf(tmp
);
2926 tcg_temp_free_i64(tmp
);
2927 /* addressing mode has changed, so end the block */
2930 s
->is_jmp
= DISAS_EXCP
;
2932 case 0x7d: /* STSI D2,(B2) [S] */
2933 check_privileged(s
, ilc
);
2934 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2935 tmp
= get_address(s
, 0, b2
, d2
);
2936 tmp32_1
= load_reg32(0);
2937 tmp32_2
= load_reg32(1);
2938 potential_page_fault(s
);
2939 gen_helper_stsi(cc_op
, tmp
, tmp32_1
, tmp32_2
);
2941 tcg_temp_free_i64(tmp
);
2942 tcg_temp_free_i32(tmp32_1
);
2943 tcg_temp_free_i32(tmp32_2
);
2945 case 0x9d: /* LFPC D2(B2) [S] */
2946 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2947 tmp
= get_address(s
, 0, b2
, d2
);
2948 tmp2
= tcg_temp_new_i64();
2949 tmp32_1
= tcg_temp_new_i32();
2950 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2951 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
2952 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
2953 tcg_temp_free_i64(tmp
);
2954 tcg_temp_free_i64(tmp2
);
2955 tcg_temp_free_i32(tmp32_1
);
2957 case 0xb1: /* STFL D2(B2) [S] */
2958 /* Store Facility List (CPU features) at 200 */
2959 check_privileged(s
, ilc
);
2960 tmp2
= tcg_const_i64(0xc0000000);
2961 tmp
= tcg_const_i64(200);
2962 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2963 tcg_temp_free_i64(tmp2
);
2964 tcg_temp_free_i64(tmp
);
2966 case 0xb2: /* LPSWE D2(B2) [S] */
2967 /* Load PSW Extended */
2968 check_privileged(s
, ilc
);
2969 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2970 tmp
= get_address(s
, 0, b2
, d2
);
2971 tmp2
= tcg_temp_new_i64();
2972 tmp3
= tcg_temp_new_i64();
2973 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
2974 tcg_gen_addi_i64(tmp
, tmp
, 8);
2975 tcg_gen_qemu_ld64(tmp3
, tmp
, get_mem_index(s
));
2976 gen_helper_load_psw(tmp2
, tmp3
);
2977 /* we need to keep cc_op intact */
2978 s
->is_jmp
= DISAS_JUMP
;
2979 tcg_temp_free_i64(tmp
);
2980 tcg_temp_free_i64(tmp2
);
2981 tcg_temp_free_i64(tmp3
);
2983 case 0x20: /* SERVC R1,R2 [RRE] */
2984 /* SCLP Service call (PV hypercall) */
2985 check_privileged(s
, ilc
);
2986 potential_page_fault(s
);
2987 tmp32_1
= load_reg32(r2
);
2989 gen_helper_servc(cc_op
, tmp32_1
, tmp
);
2991 tcg_temp_free_i32(tmp32_1
);
2992 tcg_temp_free_i64(tmp
);
2996 LOG_DISAS("illegal b2 operation 0x%x\n", op
);
2997 gen_illegal_opcode(s
, ilc
);
3002 static void disas_b3(DisasContext
*s
, int op
, int m3
, int r1
, int r2
)
3005 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3006 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op
, m3
, r1
, r2
);
3007 #define FP_HELPER(i) \
3008 tmp32_1 = tcg_const_i32(r1); \
3009 tmp32_2 = tcg_const_i32(r2); \
3010 gen_helper_ ## i (tmp32_1, tmp32_2); \
3011 tcg_temp_free_i32(tmp32_1); \
3012 tcg_temp_free_i32(tmp32_2);
3014 #define FP_HELPER_CC(i) \
3015 tmp32_1 = tcg_const_i32(r1); \
3016 tmp32_2 = tcg_const_i32(r2); \
3017 gen_helper_ ## i (cc_op, tmp32_1, tmp32_2); \
3019 tcg_temp_free_i32(tmp32_1); \
3020 tcg_temp_free_i32(tmp32_2);
3023 case 0x0: /* LPEBR R1,R2 [RRE] */
3024 FP_HELPER_CC(lpebr
);
3026 case 0x2: /* LTEBR R1,R2 [RRE] */
3027 FP_HELPER_CC(ltebr
);
3029 case 0x3: /* LCEBR R1,R2 [RRE] */
3030 FP_HELPER_CC(lcebr
);
3032 case 0x4: /* LDEBR R1,R2 [RRE] */
3035 case 0x5: /* LXDBR R1,R2 [RRE] */
3038 case 0x9: /* CEBR R1,R2 [RRE] */
3041 case 0xa: /* AEBR R1,R2 [RRE] */
3044 case 0xb: /* SEBR R1,R2 [RRE] */
3047 case 0xd: /* DEBR R1,R2 [RRE] */
3050 case 0x10: /* LPDBR R1,R2 [RRE] */
3051 FP_HELPER_CC(lpdbr
);
3053 case 0x12: /* LTDBR R1,R2 [RRE] */
3054 FP_HELPER_CC(ltdbr
);
3056 case 0x13: /* LCDBR R1,R2 [RRE] */
3057 FP_HELPER_CC(lcdbr
);
3059 case 0x15: /* SQBDR R1,R2 [RRE] */
3062 case 0x17: /* MEEBR R1,R2 [RRE] */
3065 case 0x19: /* CDBR R1,R2 [RRE] */
3068 case 0x1a: /* ADBR R1,R2 [RRE] */
3071 case 0x1b: /* SDBR R1,R2 [RRE] */
3074 case 0x1c: /* MDBR R1,R2 [RRE] */
3077 case 0x1d: /* DDBR R1,R2 [RRE] */
3080 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3081 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3082 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3083 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3084 tmp32_1
= tcg_const_i32(m3
);
3085 tmp32_2
= tcg_const_i32(r2
);
3086 tmp32_3
= tcg_const_i32(r1
);
3089 gen_helper_maebr(tmp32_1
, tmp32_3
, tmp32_2
);
3092 gen_helper_madbr(tmp32_1
, tmp32_3
, tmp32_2
);
3095 gen_helper_msdbr(tmp32_1
, tmp32_3
, tmp32_2
);
3100 tcg_temp_free_i32(tmp32_1
);
3101 tcg_temp_free_i32(tmp32_2
);
3102 tcg_temp_free_i32(tmp32_3
);
3104 case 0x40: /* LPXBR R1,R2 [RRE] */
3105 FP_HELPER_CC(lpxbr
);
3107 case 0x42: /* LTXBR R1,R2 [RRE] */
3108 FP_HELPER_CC(ltxbr
);
3110 case 0x43: /* LCXBR R1,R2 [RRE] */
3111 FP_HELPER_CC(lcxbr
);
3113 case 0x44: /* LEDBR R1,R2 [RRE] */
3116 case 0x45: /* LDXBR R1,R2 [RRE] */
3119 case 0x46: /* LEXBR R1,R2 [RRE] */
3122 case 0x49: /* CXBR R1,R2 [RRE] */
3125 case 0x4a: /* AXBR R1,R2 [RRE] */
3128 case 0x4b: /* SXBR R1,R2 [RRE] */
3131 case 0x4c: /* MXBR R1,R2 [RRE] */
3134 case 0x4d: /* DXBR R1,R2 [RRE] */
3137 case 0x65: /* LXR R1,R2 [RRE] */
3138 tmp
= load_freg(r2
);
3139 store_freg(r1
, tmp
);
3140 tcg_temp_free_i64(tmp
);
3141 tmp
= load_freg(r2
+ 2);
3142 store_freg(r1
+ 2, tmp
);
3143 tcg_temp_free_i64(tmp
);
3145 case 0x74: /* LZER R1 [RRE] */
3146 tmp32_1
= tcg_const_i32(r1
);
3147 gen_helper_lzer(tmp32_1
);
3148 tcg_temp_free_i32(tmp32_1
);
3150 case 0x75: /* LZDR R1 [RRE] */
3151 tmp32_1
= tcg_const_i32(r1
);
3152 gen_helper_lzdr(tmp32_1
);
3153 tcg_temp_free_i32(tmp32_1
);
3155 case 0x76: /* LZXR R1 [RRE] */
3156 tmp32_1
= tcg_const_i32(r1
);
3157 gen_helper_lzxr(tmp32_1
);
3158 tcg_temp_free_i32(tmp32_1
);
3160 case 0x84: /* SFPC R1 [RRE] */
3161 tmp32_1
= load_reg32(r1
);
3162 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
3163 tcg_temp_free_i32(tmp32_1
);
3165 case 0x8c: /* EFPC R1 [RRE] */
3166 tmp32_1
= tcg_temp_new_i32();
3167 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
3168 store_reg32(r1
, tmp32_1
);
3169 tcg_temp_free_i32(tmp32_1
);
3171 case 0x94: /* CEFBR R1,R2 [RRE] */
3172 case 0x95: /* CDFBR R1,R2 [RRE] */
3173 case 0x96: /* CXFBR R1,R2 [RRE] */
3174 tmp32_1
= tcg_const_i32(r1
);
3175 tmp32_2
= load_reg32(r2
);
3178 gen_helper_cefbr(tmp32_1
, tmp32_2
);
3181 gen_helper_cdfbr(tmp32_1
, tmp32_2
);
3184 gen_helper_cxfbr(tmp32_1
, tmp32_2
);
3189 tcg_temp_free_i32(tmp32_1
);
3190 tcg_temp_free_i32(tmp32_2
);
3192 case 0x98: /* CFEBR R1,R2 [RRE] */
3193 case 0x99: /* CFDBR R1,R2 [RRE] */
3194 case 0x9a: /* CFXBR R1,R2 [RRE] */
3195 tmp32_1
= tcg_const_i32(r1
);
3196 tmp32_2
= tcg_const_i32(r2
);
3197 tmp32_3
= tcg_const_i32(m3
);
3200 gen_helper_cfebr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3203 gen_helper_cfdbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3206 gen_helper_cfxbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3212 tcg_temp_free_i32(tmp32_1
);
3213 tcg_temp_free_i32(tmp32_2
);
3214 tcg_temp_free_i32(tmp32_3
);
3216 case 0xa4: /* CEGBR R1,R2 [RRE] */
3217 case 0xa5: /* CDGBR R1,R2 [RRE] */
3218 tmp32_1
= tcg_const_i32(r1
);
3222 gen_helper_cegbr(tmp32_1
, tmp
);
3225 gen_helper_cdgbr(tmp32_1
, tmp
);
3230 tcg_temp_free_i32(tmp32_1
);
3231 tcg_temp_free_i64(tmp
);
3233 case 0xa6: /* CXGBR R1,R2 [RRE] */
3234 tmp32_1
= tcg_const_i32(r1
);
3236 gen_helper_cxgbr(tmp32_1
, tmp
);
3237 tcg_temp_free_i32(tmp32_1
);
3238 tcg_temp_free_i64(tmp
);
3240 case 0xa8: /* CGEBR R1,R2 [RRE] */
3241 tmp32_1
= tcg_const_i32(r1
);
3242 tmp32_2
= tcg_const_i32(r2
);
3243 tmp32_3
= tcg_const_i32(m3
);
3244 gen_helper_cgebr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3246 tcg_temp_free_i32(tmp32_1
);
3247 tcg_temp_free_i32(tmp32_2
);
3248 tcg_temp_free_i32(tmp32_3
);
3250 case 0xa9: /* CGDBR R1,R2 [RRE] */
3251 tmp32_1
= tcg_const_i32(r1
);
3252 tmp32_2
= tcg_const_i32(r2
);
3253 tmp32_3
= tcg_const_i32(m3
);
3254 gen_helper_cgdbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3256 tcg_temp_free_i32(tmp32_1
);
3257 tcg_temp_free_i32(tmp32_2
);
3258 tcg_temp_free_i32(tmp32_3
);
3260 case 0xaa: /* CGXBR R1,R2 [RRE] */
3261 tmp32_1
= tcg_const_i32(r1
);
3262 tmp32_2
= tcg_const_i32(r2
);
3263 tmp32_3
= tcg_const_i32(m3
);
3264 gen_helper_cgxbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3266 tcg_temp_free_i32(tmp32_1
);
3267 tcg_temp_free_i32(tmp32_2
);
3268 tcg_temp_free_i32(tmp32_3
);
3271 LOG_DISAS("illegal b3 operation 0x%x\n", op
);
3272 gen_illegal_opcode(s
, 2);
3280 static void disas_b9(DisasContext
*s
, int op
, int r1
, int r2
)
3282 TCGv_i64 tmp
, tmp2
, tmp3
;
3283 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3285 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
3287 case 0x0: /* LPGR R1,R2 [RRE] */
3288 case 0x1: /* LNGR R1,R2 [RRE] */
3289 case 0x2: /* LTGR R1,R2 [RRE] */
3290 case 0x3: /* LCGR R1,R2 [RRE] */
3291 case 0x10: /* LPGFR R1,R2 [RRE] */
3292 case 0x11: /* LNFGR R1,R2 [RRE] */
3293 case 0x12: /* LTGFR R1,R2 [RRE] */
3294 case 0x13: /* LCGFR R1,R2 [RRE] */
3296 tmp
= load_reg32_i64(r2
);
3301 case 0x0: /* LP?GR */
3302 set_cc_abs64(s
, tmp
);
3303 gen_helper_abs_i64(tmp
, tmp
);
3306 case 0x1: /* LN?GR */
3307 set_cc_nabs64(s
, tmp
);
3308 gen_helper_nabs_i64(tmp
, tmp
);
3311 case 0x2: /* LT?GR */
3317 case 0x3: /* LC?GR */
3318 tcg_gen_neg_i64(regs
[r1
], tmp
);
3319 set_cc_comp64(s
, regs
[r1
]);
3322 tcg_temp_free_i64(tmp
);
3324 case 0x4: /* LGR R1,R2 [RRE] */
3325 store_reg(r1
, regs
[r2
]);
3327 case 0x6: /* LGBR R1,R2 [RRE] */
3328 tmp2
= load_reg(r2
);
3329 tcg_gen_ext8s_i64(tmp2
, tmp2
);
3330 store_reg(r1
, tmp2
);
3331 tcg_temp_free_i64(tmp2
);
3333 case 0x8: /* AGR R1,R2 [RRE] */
3334 case 0xa: /* ALGR R1,R2 [RRE] */
3336 tmp2
= load_reg(r2
);
3337 tmp3
= tcg_temp_new_i64();
3338 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3339 store_reg(r1
, tmp3
);
3342 set_cc_add64(s
, tmp
, tmp2
, tmp3
);
3345 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3350 tcg_temp_free_i64(tmp
);
3351 tcg_temp_free_i64(tmp2
);
3352 tcg_temp_free_i64(tmp3
);
3354 case 0x9: /* SGR R1,R2 [RRE] */
3355 case 0xb: /* SLGR R1,R2 [RRE] */
3356 case 0x1b: /* SLGFR R1,R2 [RRE] */
3357 case 0x19: /* SGFR R1,R2 [RRE] */
3361 tmp32_1
= load_reg32(r2
);
3362 tmp2
= tcg_temp_new_i64();
3363 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3364 tcg_temp_free_i32(tmp32_1
);
3367 tmp32_1
= load_reg32(r2
);
3368 tmp2
= tcg_temp_new_i64();
3369 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3370 tcg_temp_free_i32(tmp32_1
);
3373 tmp2
= load_reg(r2
);
3376 tmp3
= tcg_temp_new_i64();
3377 tcg_gen_sub_i64(tmp3
, tmp
, tmp2
);
3378 store_reg(r1
, tmp3
);
3382 set_cc_sub64(s
, tmp
, tmp2
, tmp3
);
3386 set_cc_subu64(s
, tmp
, tmp2
, tmp3
);
3391 tcg_temp_free_i64(tmp
);
3392 tcg_temp_free_i64(tmp2
);
3393 tcg_temp_free_i64(tmp3
);
3395 case 0xc: /* MSGR R1,R2 [RRE] */
3396 case 0x1c: /* MSGFR R1,R2 [RRE] */
3398 tmp2
= load_reg(r2
);
3400 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3402 tcg_gen_mul_i64(tmp
, tmp
, tmp2
);
3404 tcg_temp_free_i64(tmp
);
3405 tcg_temp_free_i64(tmp2
);
3407 case 0xd: /* DSGR R1,R2 [RRE] */
3408 case 0x1d: /* DSGFR R1,R2 [RRE] */
3409 tmp
= load_reg(r1
+ 1);
3411 tmp2
= load_reg(r2
);
3413 tmp32_1
= load_reg32(r2
);
3414 tmp2
= tcg_temp_new_i64();
3415 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3416 tcg_temp_free_i32(tmp32_1
);
3418 tmp3
= tcg_temp_new_i64();
3419 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3420 store_reg(r1
+ 1, tmp3
);
3421 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3422 store_reg(r1
, tmp3
);
3423 tcg_temp_free_i64(tmp
);
3424 tcg_temp_free_i64(tmp2
);
3425 tcg_temp_free_i64(tmp3
);
3427 case 0x14: /* LGFR R1,R2 [RRE] */
3428 tmp32_1
= load_reg32(r2
);
3429 tmp
= tcg_temp_new_i64();
3430 tcg_gen_ext_i32_i64(tmp
, tmp32_1
);
3432 tcg_temp_free_i32(tmp32_1
);
3433 tcg_temp_free_i64(tmp
);
3435 case 0x16: /* LLGFR R1,R2 [RRE] */
3436 tmp32_1
= load_reg32(r2
);
3437 tmp
= tcg_temp_new_i64();
3438 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3440 tcg_temp_free_i32(tmp32_1
);
3441 tcg_temp_free_i64(tmp
);
3443 case 0x17: /* LLGTR R1,R2 [RRE] */
3444 tmp32_1
= load_reg32(r2
);
3445 tmp
= tcg_temp_new_i64();
3446 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0x7fffffffUL
);
3447 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3449 tcg_temp_free_i32(tmp32_1
);
3450 tcg_temp_free_i64(tmp
);
3452 case 0x18: /* AGFR R1,R2 [RRE] */
3453 case 0x1a: /* ALGFR R1,R2 [RRE] */
3454 tmp32_1
= load_reg32(r2
);
3455 tmp2
= tcg_temp_new_i64();
3457 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3459 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3461 tcg_temp_free_i32(tmp32_1
);
3463 tmp3
= tcg_temp_new_i64();
3464 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3465 store_reg(r1
, tmp3
);
3467 set_cc_add64(s
, tmp
, tmp2
, tmp3
);
3469 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3471 tcg_temp_free_i64(tmp
);
3472 tcg_temp_free_i64(tmp2
);
3473 tcg_temp_free_i64(tmp3
);
3475 case 0x0f: /* LRVGR R1,R2 [RRE] */
3476 tcg_gen_bswap64_i64(regs
[r1
], regs
[r2
]);
3478 case 0x1f: /* LRVR R1,R2 [RRE] */
3479 tmp32_1
= load_reg32(r2
);
3480 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
3481 store_reg32(r1
, tmp32_1
);
3482 tcg_temp_free_i32(tmp32_1
);
3484 case 0x20: /* CGR R1,R2 [RRE] */
3485 case 0x30: /* CGFR R1,R2 [RRE] */
3486 tmp2
= load_reg(r2
);
3488 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3491 cmp_s64(s
, tmp
, tmp2
);
3492 tcg_temp_free_i64(tmp
);
3493 tcg_temp_free_i64(tmp2
);
3495 case 0x21: /* CLGR R1,R2 [RRE] */
3496 case 0x31: /* CLGFR R1,R2 [RRE] */
3497 tmp2
= load_reg(r2
);
3499 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3502 cmp_u64(s
, tmp
, tmp2
);
3503 tcg_temp_free_i64(tmp
);
3504 tcg_temp_free_i64(tmp2
);
3506 case 0x26: /* LBR R1,R2 [RRE] */
3507 tmp32_1
= load_reg32(r2
);
3508 tcg_gen_ext8s_i32(tmp32_1
, tmp32_1
);
3509 store_reg32(r1
, tmp32_1
);
3510 tcg_temp_free_i32(tmp32_1
);
3512 case 0x27: /* LHR R1,R2 [RRE] */
3513 tmp32_1
= load_reg32(r2
);
3514 tcg_gen_ext16s_i32(tmp32_1
, tmp32_1
);
3515 store_reg32(r1
, tmp32_1
);
3516 tcg_temp_free_i32(tmp32_1
);
3518 case 0x80: /* NGR R1,R2 [RRE] */
3519 case 0x81: /* OGR R1,R2 [RRE] */
3520 case 0x82: /* XGR R1,R2 [RRE] */
3522 tmp2
= load_reg(r2
);
3525 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
3528 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3531 tcg_gen_xor_i64(tmp
, tmp
, tmp2
);
3537 set_cc_nz_u64(s
, tmp
);
3538 tcg_temp_free_i64(tmp
);
3539 tcg_temp_free_i64(tmp2
);
3541 case 0x83: /* FLOGR R1,R2 [RRE] */
3543 tmp32_1
= tcg_const_i32(r1
);
3544 gen_helper_flogr(cc_op
, tmp32_1
, tmp
);
3546 tcg_temp_free_i64(tmp
);
3547 tcg_temp_free_i32(tmp32_1
);
3549 case 0x84: /* LLGCR R1,R2 [RRE] */
3551 tcg_gen_andi_i64(tmp
, tmp
, 0xff);
3553 tcg_temp_free_i64(tmp
);
3555 case 0x85: /* LLGHR R1,R2 [RRE] */
3557 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
3559 tcg_temp_free_i64(tmp
);
3561 case 0x87: /* DLGR R1,R2 [RRE] */
3562 tmp32_1
= tcg_const_i32(r1
);
3564 gen_helper_dlg(tmp32_1
, tmp
);
3565 tcg_temp_free_i64(tmp
);
3566 tcg_temp_free_i32(tmp32_1
);
3568 case 0x88: /* ALCGR R1,R2 [RRE] */
3570 tmp2
= load_reg(r2
);
3571 tmp3
= tcg_temp_new_i64();
3573 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
3574 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
3575 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
3576 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
3577 tcg_gen_add_i64(tmp3
, tmp
, tmp3
);
3578 store_reg(r1
, tmp3
);
3579 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3580 tcg_temp_free_i64(tmp
);
3581 tcg_temp_free_i64(tmp2
);
3582 tcg_temp_free_i64(tmp3
);
3584 case 0x89: /* SLBGR R1,R2 [RRE] */
3586 tmp2
= load_reg(r2
);
3587 tmp32_1
= tcg_const_i32(r1
);
3589 gen_helper_slbg(cc_op
, cc_op
, tmp32_1
, tmp
, tmp2
);
3591 tcg_temp_free_i64(tmp
);
3592 tcg_temp_free_i64(tmp2
);
3593 tcg_temp_free_i32(tmp32_1
);
3595 case 0x94: /* LLCR R1,R2 [RRE] */
3596 tmp32_1
= load_reg32(r2
);
3597 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xff);
3598 store_reg32(r1
, tmp32_1
);
3599 tcg_temp_free_i32(tmp32_1
);
3601 case 0x95: /* LLHR R1,R2 [RRE] */
3602 tmp32_1
= load_reg32(r2
);
3603 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xffff);
3604 store_reg32(r1
, tmp32_1
);
3605 tcg_temp_free_i32(tmp32_1
);
3607 case 0x96: /* MLR R1,R2 [RRE] */
3608 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3609 tmp2
= load_reg(r2
);
3610 tmp3
= load_reg((r1
+ 1) & 15);
3611 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3612 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3613 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3614 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3615 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3616 store_reg32_i64(r1
, tmp2
);
3617 tcg_temp_free_i64(tmp2
);
3618 tcg_temp_free_i64(tmp3
);
3620 case 0x97: /* DLR R1,R2 [RRE] */
3621 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3622 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3624 tmp2
= load_reg(r2
);
3625 tmp3
= load_reg((r1
+ 1) & 15);
3626 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3627 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3628 tcg_gen_shli_i64(tmp
, tmp
, 32);
3629 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
3631 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3632 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
3633 store_reg32_i64((r1
+ 1) & 15, tmp
);
3634 store_reg32_i64(r1
, tmp3
);
3635 tcg_temp_free_i64(tmp
);
3636 tcg_temp_free_i64(tmp2
);
3637 tcg_temp_free_i64(tmp3
);
3639 case 0x98: /* ALCR R1,R2 [RRE] */
3640 tmp32_1
= load_reg32(r1
);
3641 tmp32_2
= load_reg32(r2
);
3642 tmp32_3
= tcg_temp_new_i32();
3643 /* XXX possible optimization point */
3645 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
3646 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3647 store_reg32(r1
, tmp32_3
);
3648 tcg_temp_free_i32(tmp32_1
);
3649 tcg_temp_free_i32(tmp32_2
);
3650 tcg_temp_free_i32(tmp32_3
);
3652 case 0x99: /* SLBR R1,R2 [RRE] */
3653 tmp32_1
= load_reg32(r2
);
3654 tmp32_2
= tcg_const_i32(r1
);
3656 gen_helper_slb(cc_op
, cc_op
, tmp32_2
, tmp32_1
);
3658 tcg_temp_free_i32(tmp32_1
);
3659 tcg_temp_free_i32(tmp32_2
);
3662 LOG_DISAS("illegal b9 operation 0x%x\n", op
);
3663 gen_illegal_opcode(s
, 2);
3668 static void disas_c0(DisasContext
*s
, int op
, int r1
, int i2
)
3671 TCGv_i32 tmp32_1
, tmp32_2
;
3672 uint64_t target
= s
->pc
+ i2
* 2LL;
3675 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op
, r1
, i2
);
3678 case 0: /* larl r1, i2 */
3679 tmp
= tcg_const_i64(target
);
3681 tcg_temp_free_i64(tmp
);
3683 case 0x1: /* LGFI R1,I2 [RIL] */
3684 tmp
= tcg_const_i64((int64_t)i2
);
3686 tcg_temp_free_i64(tmp
);
3688 case 0x4: /* BRCL M1,I2 [RIL] */
3689 /* m1 & (1 << (3 - cc)) */
3690 tmp32_1
= tcg_const_i32(3);
3691 tmp32_2
= tcg_const_i32(1);
3693 tcg_gen_sub_i32(tmp32_1
, tmp32_1
, cc_op
);
3694 tcg_gen_shl_i32(tmp32_2
, tmp32_2
, tmp32_1
);
3695 tcg_temp_free_i32(tmp32_1
);
3696 tmp32_1
= tcg_const_i32(r1
); /* m1 == r1 */
3697 tcg_gen_and_i32(tmp32_1
, tmp32_1
, tmp32_2
);
3698 l1
= gen_new_label();
3699 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
3700 gen_goto_tb(s
, 0, target
);
3702 gen_goto_tb(s
, 1, s
->pc
+ 6);
3703 s
->is_jmp
= DISAS_TB_JUMP
;
3704 tcg_temp_free_i32(tmp32_1
);
3705 tcg_temp_free_i32(tmp32_2
);
3707 case 0x5: /* brasl r1, i2 */
3708 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 6));
3710 tcg_temp_free_i64(tmp
);
3711 gen_goto_tb(s
, 0, target
);
3712 s
->is_jmp
= DISAS_TB_JUMP
;
3714 case 0x7: /* XILF R1,I2 [RIL] */
3715 case 0xb: /* NILF R1,I2 [RIL] */
3716 case 0xd: /* OILF R1,I2 [RIL] */
3717 tmp32_1
= load_reg32(r1
);
3720 tcg_gen_xori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3723 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3726 tcg_gen_ori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3731 store_reg32(r1
, tmp32_1
);
3732 set_cc_nz_u32(s
, tmp32_1
);
3733 tcg_temp_free_i32(tmp32_1
);
3735 case 0x9: /* IILF R1,I2 [RIL] */
3736 tmp32_1
= tcg_const_i32((uint32_t)i2
);
3737 store_reg32(r1
, tmp32_1
);
3738 tcg_temp_free_i32(tmp32_1
);
3740 case 0xa: /* NIHF R1,I2 [RIL] */
3742 tmp32_1
= tcg_temp_new_i32();
3743 tcg_gen_andi_i64(tmp
, tmp
, (((uint64_t)((uint32_t)i2
)) << 32)
3746 tcg_gen_shri_i64(tmp
, tmp
, 32);
3747 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3748 set_cc_nz_u32(s
, tmp32_1
);
3749 tcg_temp_free_i64(tmp
);
3750 tcg_temp_free_i32(tmp32_1
);
3752 case 0xe: /* LLIHF R1,I2 [RIL] */
3753 tmp
= tcg_const_i64(((uint64_t)(uint32_t)i2
) << 32);
3755 tcg_temp_free_i64(tmp
);
3757 case 0xf: /* LLILF R1,I2 [RIL] */
3758 tmp
= tcg_const_i64((uint32_t)i2
);
3760 tcg_temp_free_i64(tmp
);
3763 LOG_DISAS("illegal c0 operation 0x%x\n", op
);
3764 gen_illegal_opcode(s
, 3);
3769 static void disas_c2(DisasContext
*s
, int op
, int r1
, int i2
)
3771 TCGv_i64 tmp
, tmp2
, tmp3
;
3772 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3775 case 0x4: /* SLGFI R1,I2 [RIL] */
3776 case 0xa: /* ALGFI R1,I2 [RIL] */
3778 tmp2
= tcg_const_i64((uint64_t)(uint32_t)i2
);
3779 tmp3
= tcg_temp_new_i64();
3782 tcg_gen_sub_i64(tmp3
, tmp
, tmp2
);
3783 set_cc_subu64(s
, tmp
, tmp2
, tmp3
);
3786 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3787 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3792 store_reg(r1
, tmp3
);
3793 tcg_temp_free_i64(tmp
);
3794 tcg_temp_free_i64(tmp2
);
3795 tcg_temp_free_i64(tmp3
);
3797 case 0x5: /* SLFI R1,I2 [RIL] */
3798 case 0xb: /* ALFI R1,I2 [RIL] */
3799 tmp32_1
= load_reg32(r1
);
3800 tmp32_2
= tcg_const_i32(i2
);
3801 tmp32_3
= tcg_temp_new_i32();
3804 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3805 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3808 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3809 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3814 store_reg32(r1
, tmp32_3
);
3815 tcg_temp_free_i32(tmp32_1
);
3816 tcg_temp_free_i32(tmp32_2
);
3817 tcg_temp_free_i32(tmp32_3
);
3819 case 0xc: /* CGFI R1,I2 [RIL] */
3821 cmp_s64c(s
, tmp
, (int64_t)i2
);
3822 tcg_temp_free_i64(tmp
);
3824 case 0xe: /* CLGFI R1,I2 [RIL] */
3826 cmp_u64c(s
, tmp
, (uint64_t)(uint32_t)i2
);
3827 tcg_temp_free_i64(tmp
);
3829 case 0xd: /* CFI R1,I2 [RIL] */
3830 tmp32_1
= load_reg32(r1
);
3831 cmp_s32c(s
, tmp32_1
, i2
);
3832 tcg_temp_free_i32(tmp32_1
);
3834 case 0xf: /* CLFI R1,I2 [RIL] */
3835 tmp32_1
= load_reg32(r1
);
3836 cmp_u32c(s
, tmp32_1
, i2
);
3837 tcg_temp_free_i32(tmp32_1
);
3840 LOG_DISAS("illegal c2 operation 0x%x\n", op
);
3841 gen_illegal_opcode(s
, 3);
3846 static void gen_and_or_xor_i32(int opc
, TCGv_i32 tmp
, TCGv_i32 tmp2
)
3848 switch (opc
& 0xf) {
3850 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
3853 tcg_gen_or_i32(tmp
, tmp
, tmp2
);
3856 tcg_gen_xor_i32(tmp
, tmp
, tmp2
);
3863 static void disas_s390_insn(DisasContext
*s
)
3865 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
3866 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
, tmp32_4
;
3869 int op
, r1
, r2
, r3
, d1
, d2
, x2
, b1
, b2
, i
, i2
, r1b
;
3874 opc
= ldub_code(s
->pc
);
3875 LOG_DISAS("opc 0x%x\n", opc
);
3880 #ifndef CONFIG_USER_ONLY
3881 case 0x01: /* SAM */
3882 insn
= ld_code2(s
->pc
);
3883 /* set addressing mode, but we only do 64bit anyways */
3886 case 0x6: /* BCTR R1,R2 [RR] */
3887 insn
= ld_code2(s
->pc
);
3888 decode_rr(s
, insn
, &r1
, &r2
);
3889 tmp32_1
= load_reg32(r1
);
3890 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3891 store_reg32(r1
, tmp32_1
);
3894 gen_update_cc_op(s
);
3895 l1
= gen_new_label();
3896 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3898 /* not taking the branch, jump to after the instruction */
3899 gen_goto_tb(s
, 0, s
->pc
+ 2);
3902 /* take the branch, move R2 into psw.addr */
3903 tmp32_1
= load_reg32(r2
);
3904 tmp
= tcg_temp_new_i64();
3905 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3906 tcg_gen_mov_i64(psw_addr
, tmp
);
3907 s
->is_jmp
= DISAS_JUMP
;
3908 tcg_temp_free_i32(tmp32_1
);
3909 tcg_temp_free_i64(tmp
);
3912 case 0x7: /* BCR M1,R2 [RR] */
3913 insn
= ld_code2(s
->pc
);
3914 decode_rr(s
, insn
, &r1
, &r2
);
3917 gen_bcr(s
, r1
, tmp
, s
->pc
);
3918 tcg_temp_free_i64(tmp
);
3919 s
->is_jmp
= DISAS_TB_JUMP
;
3921 /* XXX: "serialization and checkpoint-synchronization function"? */
3924 case 0xa: /* SVC I [RR] */
3925 insn
= ld_code2(s
->pc
);
3930 tmp32_1
= tcg_const_i32(i
);
3931 tmp32_2
= tcg_const_i32(ilc
* 2);
3932 tmp32_3
= tcg_const_i32(EXCP_SVC
);
3933 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, int_svc_code
));
3934 tcg_gen_st_i32(tmp32_2
, cpu_env
, offsetof(CPUState
, int_svc_ilc
));
3935 gen_helper_exception(tmp32_3
);
3936 s
->is_jmp
= DISAS_EXCP
;
3937 tcg_temp_free_i32(tmp32_1
);
3938 tcg_temp_free_i32(tmp32_2
);
3939 tcg_temp_free_i32(tmp32_3
);
3941 case 0xd: /* BASR R1,R2 [RR] */
3942 insn
= ld_code2(s
->pc
);
3943 decode_rr(s
, insn
, &r1
, &r2
);
3944 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 2));
3947 tmp2
= load_reg(r2
);
3948 tcg_gen_mov_i64(psw_addr
, tmp2
);
3949 tcg_temp_free_i64(tmp2
);
3950 s
->is_jmp
= DISAS_JUMP
;
3952 tcg_temp_free_i64(tmp
);
3954 case 0xe: /* MVCL R1,R2 [RR] */
3955 insn
= ld_code2(s
->pc
);
3956 decode_rr(s
, insn
, &r1
, &r2
);
3957 tmp32_1
= tcg_const_i32(r1
);
3958 tmp32_2
= tcg_const_i32(r2
);
3959 potential_page_fault(s
);
3960 gen_helper_mvcl(cc_op
, tmp32_1
, tmp32_2
);
3962 tcg_temp_free_i32(tmp32_1
);
3963 tcg_temp_free_i32(tmp32_2
);
3965 case 0x10: /* LPR R1,R2 [RR] */
3966 insn
= ld_code2(s
->pc
);
3967 decode_rr(s
, insn
, &r1
, &r2
);
3968 tmp32_1
= load_reg32(r2
);
3969 set_cc_abs32(s
, tmp32_1
);
3970 gen_helper_abs_i32(tmp32_1
, tmp32_1
);
3971 store_reg32(r1
, tmp32_1
);
3972 tcg_temp_free_i32(tmp32_1
);
3974 case 0x11: /* LNR R1,R2 [RR] */
3975 insn
= ld_code2(s
->pc
);
3976 decode_rr(s
, insn
, &r1
, &r2
);
3977 tmp32_1
= load_reg32(r2
);
3978 set_cc_nabs32(s
, tmp32_1
);
3979 gen_helper_nabs_i32(tmp32_1
, tmp32_1
);
3980 store_reg32(r1
, tmp32_1
);
3981 tcg_temp_free_i32(tmp32_1
);
3983 case 0x12: /* LTR R1,R2 [RR] */
3984 insn
= ld_code2(s
->pc
);
3985 decode_rr(s
, insn
, &r1
, &r2
);
3986 tmp32_1
= load_reg32(r2
);
3988 store_reg32(r1
, tmp32_1
);
3990 set_cc_s32(s
, tmp32_1
);
3991 tcg_temp_free_i32(tmp32_1
);
3993 case 0x13: /* LCR R1,R2 [RR] */
3994 insn
= ld_code2(s
->pc
);
3995 decode_rr(s
, insn
, &r1
, &r2
);
3996 tmp32_1
= load_reg32(r2
);
3997 tcg_gen_neg_i32(tmp32_1
, tmp32_1
);
3998 store_reg32(r1
, tmp32_1
);
3999 set_cc_comp32(s
, tmp32_1
);
4000 tcg_temp_free_i32(tmp32_1
);
4002 case 0x14: /* NR R1,R2 [RR] */
4003 case 0x16: /* OR R1,R2 [RR] */
4004 case 0x17: /* XR R1,R2 [RR] */
4005 insn
= ld_code2(s
->pc
);
4006 decode_rr(s
, insn
, &r1
, &r2
);
4007 tmp32_2
= load_reg32(r2
);
4008 tmp32_1
= load_reg32(r1
);
4009 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
4010 store_reg32(r1
, tmp32_1
);
4011 set_cc_nz_u32(s
, tmp32_1
);
4012 tcg_temp_free_i32(tmp32_1
);
4013 tcg_temp_free_i32(tmp32_2
);
4015 case 0x18: /* LR R1,R2 [RR] */
4016 insn
= ld_code2(s
->pc
);
4017 decode_rr(s
, insn
, &r1
, &r2
);
4018 tmp32_1
= load_reg32(r2
);
4019 store_reg32(r1
, tmp32_1
);
4020 tcg_temp_free_i32(tmp32_1
);
4022 case 0x15: /* CLR R1,R2 [RR] */
4023 case 0x19: /* CR R1,R2 [RR] */
4024 insn
= ld_code2(s
->pc
);
4025 decode_rr(s
, insn
, &r1
, &r2
);
4026 tmp32_1
= load_reg32(r1
);
4027 tmp32_2
= load_reg32(r2
);
4029 cmp_u32(s
, tmp32_1
, tmp32_2
);
4031 cmp_s32(s
, tmp32_1
, tmp32_2
);
4033 tcg_temp_free_i32(tmp32_1
);
4034 tcg_temp_free_i32(tmp32_2
);
4036 case 0x1a: /* AR R1,R2 [RR] */
4037 case 0x1e: /* ALR R1,R2 [RR] */
4038 insn
= ld_code2(s
->pc
);
4039 decode_rr(s
, insn
, &r1
, &r2
);
4040 tmp32_1
= load_reg32(r1
);
4041 tmp32_2
= load_reg32(r2
);
4042 tmp32_3
= tcg_temp_new_i32();
4043 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4044 store_reg32(r1
, tmp32_3
);
4046 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4048 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4050 tcg_temp_free_i32(tmp32_1
);
4051 tcg_temp_free_i32(tmp32_2
);
4052 tcg_temp_free_i32(tmp32_3
);
4054 case 0x1b: /* SR R1,R2 [RR] */
4055 case 0x1f: /* SLR R1,R2 [RR] */
4056 insn
= ld_code2(s
->pc
);
4057 decode_rr(s
, insn
, &r1
, &r2
);
4058 tmp32_1
= load_reg32(r1
);
4059 tmp32_2
= load_reg32(r2
);
4060 tmp32_3
= tcg_temp_new_i32();
4061 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4062 store_reg32(r1
, tmp32_3
);
4064 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4066 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4068 tcg_temp_free_i32(tmp32_1
);
4069 tcg_temp_free_i32(tmp32_2
);
4070 tcg_temp_free_i32(tmp32_3
);
4072 case 0x1c: /* MR R1,R2 [RR] */
4073 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4074 insn
= ld_code2(s
->pc
);
4075 decode_rr(s
, insn
, &r1
, &r2
);
4076 tmp2
= load_reg(r2
);
4077 tmp3
= load_reg((r1
+ 1) & 15);
4078 tcg_gen_ext32s_i64(tmp2
, tmp2
);
4079 tcg_gen_ext32s_i64(tmp3
, tmp3
);
4080 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
4081 store_reg32_i64((r1
+ 1) & 15, tmp2
);
4082 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
4083 store_reg32_i64(r1
, tmp2
);
4084 tcg_temp_free_i64(tmp2
);
4085 tcg_temp_free_i64(tmp3
);
4087 case 0x1d: /* DR R1,R2 [RR] */
4088 insn
= ld_code2(s
->pc
);
4089 decode_rr(s
, insn
, &r1
, &r2
);
4090 tmp32_1
= load_reg32(r1
);
4091 tmp32_2
= load_reg32(r1
+ 1);
4092 tmp32_3
= load_reg32(r2
);
4094 tmp
= tcg_temp_new_i64(); /* dividend */
4095 tmp2
= tcg_temp_new_i64(); /* divisor */
4096 tmp3
= tcg_temp_new_i64();
4098 /* dividend is r(r1 << 32) | r(r1 + 1) */
4099 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4100 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
4101 tcg_gen_shli_i64(tmp
, tmp
, 32);
4102 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
4104 /* divisor is r(r2) */
4105 tcg_gen_ext_i32_i64(tmp2
, tmp32_3
);
4107 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
4108 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
4110 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4111 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
4113 store_reg32(r1
, tmp32_1
); /* remainder */
4114 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
4115 tcg_temp_free_i32(tmp32_1
);
4116 tcg_temp_free_i32(tmp32_2
);
4117 tcg_temp_free_i32(tmp32_3
);
4118 tcg_temp_free_i64(tmp
);
4119 tcg_temp_free_i64(tmp2
);
4120 tcg_temp_free_i64(tmp3
);
4122 case 0x28: /* LDR R1,R2 [RR] */
4123 insn
= ld_code2(s
->pc
);
4124 decode_rr(s
, insn
, &r1
, &r2
);
4125 tmp
= load_freg(r2
);
4126 store_freg(r1
, tmp
);
4127 tcg_temp_free_i64(tmp
);
4129 case 0x38: /* LER R1,R2 [RR] */
4130 insn
= ld_code2(s
->pc
);
4131 decode_rr(s
, insn
, &r1
, &r2
);
4132 tmp32_1
= load_freg32(r2
);
4133 store_freg32(r1
, tmp32_1
);
4134 tcg_temp_free_i32(tmp32_1
);
4136 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4137 insn
= ld_code4(s
->pc
);
4138 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4139 tmp2
= load_reg(r1
);
4140 tcg_gen_qemu_st16(tmp2
, tmp
, get_mem_index(s
));
4141 tcg_temp_free_i64(tmp
);
4142 tcg_temp_free_i64(tmp2
);
4145 insn
= ld_code4(s
->pc
);
4146 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4147 store_reg(r1
, tmp
); /* FIXME: 31/24-bit addressing */
4148 tcg_temp_free_i64(tmp
);
4150 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4151 insn
= ld_code4(s
->pc
);
4152 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4153 tmp2
= load_reg(r1
);
4154 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4155 tcg_temp_free_i64(tmp
);
4156 tcg_temp_free_i64(tmp2
);
4158 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4159 insn
= ld_code4(s
->pc
);
4160 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4161 tmp2
= tcg_temp_new_i64();
4162 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4163 store_reg8(r1
, tmp2
);
4164 tcg_temp_free_i64(tmp
);
4165 tcg_temp_free_i64(tmp2
);
4167 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4168 insn
= ld_code4(s
->pc
);
4169 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4170 tmp2
= load_reg(r1
);
4171 tmp3
= tcg_const_i64(s
->pc
+ 4);
4174 gen_helper_ex(cc_op
, cc_op
, tmp2
, tmp
, tmp3
);
4176 tcg_temp_free_i64(tmp
);
4177 tcg_temp_free_i64(tmp2
);
4178 tcg_temp_free_i64(tmp3
);
4180 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4181 insn
= ld_code4(s
->pc
);
4182 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4183 tcg_temp_free_i64(tmp
);
4185 tmp32_1
= load_reg32(r1
);
4186 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
4187 store_reg32(r1
, tmp32_1
);
4189 gen_update_cc_op(s
);
4190 l1
= gen_new_label();
4191 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
4193 /* not taking the branch, jump to after the instruction */
4194 gen_goto_tb(s
, 0, s
->pc
+ 4);
4197 /* take the branch, move R2 into psw.addr */
4198 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4199 tcg_gen_mov_i64(psw_addr
, tmp
);
4200 s
->is_jmp
= DISAS_JUMP
;
4201 tcg_temp_free_i32(tmp32_1
);
4202 tcg_temp_free_i64(tmp
);
4204 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4205 insn
= ld_code4(s
->pc
);
4206 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4207 gen_bcr(s
, r1
, tmp
, s
->pc
+ 4);
4208 tcg_temp_free_i64(tmp
);
4209 s
->is_jmp
= DISAS_TB_JUMP
;
4211 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4212 insn
= ld_code4(s
->pc
);
4213 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4214 tmp2
= tcg_temp_new_i64();
4215 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4216 store_reg32_i64(r1
, tmp2
);
4217 tcg_temp_free_i64(tmp
);
4218 tcg_temp_free_i64(tmp2
);
4220 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4221 insn
= ld_code4(s
->pc
);
4222 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4223 tmp32_1
= load_reg32(r1
);
4224 tmp32_2
= tcg_temp_new_i32();
4225 tmp2
= tcg_temp_new_i64();
4226 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4227 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4228 cmp_s32(s
, tmp32_1
, tmp32_2
);
4229 tcg_temp_free_i32(tmp32_1
);
4230 tcg_temp_free_i32(tmp32_2
);
4231 tcg_temp_free_i64(tmp
);
4232 tcg_temp_free_i64(tmp2
);
4234 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4235 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4236 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4237 insn
= ld_code4(s
->pc
);
4238 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4239 tmp2
= tcg_temp_new_i64();
4240 tmp32_1
= load_reg32(r1
);
4241 tmp32_2
= tcg_temp_new_i32();
4242 tmp32_3
= tcg_temp_new_i32();
4244 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4245 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4248 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4249 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4252 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4253 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4256 tcg_gen_mul_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4261 store_reg32(r1
, tmp32_3
);
4263 tcg_temp_free_i32(tmp32_1
);
4264 tcg_temp_free_i32(tmp32_2
);
4265 tcg_temp_free_i32(tmp32_3
);
4266 tcg_temp_free_i64(tmp
);
4267 tcg_temp_free_i64(tmp2
);
4269 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4270 insn
= ld_code4(s
->pc
);
4271 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4272 tmp2
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
4273 store_reg(r1
, tmp2
);
4274 tcg_gen_mov_i64(psw_addr
, tmp
);
4275 tcg_temp_free_i64(tmp
);
4276 tcg_temp_free_i64(tmp2
);
4277 s
->is_jmp
= DISAS_JUMP
;
4279 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4280 insn
= ld_code4(s
->pc
);
4281 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4282 tmp2
= tcg_temp_new_i64();
4283 tmp32_1
= tcg_temp_new_i32();
4284 tcg_gen_trunc_i64_i32(tmp32_1
, regs
[r1
]);
4285 gen_helper_cvd(tmp2
, tmp32_1
);
4286 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4287 tcg_temp_free_i64(tmp
);
4288 tcg_temp_free_i64(tmp2
);
4289 tcg_temp_free_i32(tmp32_1
);
4291 case 0x50: /* st r1, d2(x2, b2) */
4292 insn
= ld_code4(s
->pc
);
4293 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4294 tmp2
= load_reg(r1
);
4295 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4296 tcg_temp_free_i64(tmp
);
4297 tcg_temp_free_i64(tmp2
);
4299 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4300 insn
= ld_code4(s
->pc
);
4301 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4302 tmp2
= tcg_temp_new_i64();
4303 tmp32_1
= tcg_temp_new_i32();
4304 tmp32_2
= load_reg32(r1
);
4305 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4306 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4307 cmp_u32(s
, tmp32_2
, tmp32_1
);
4308 tcg_temp_free_i64(tmp
);
4309 tcg_temp_free_i64(tmp2
);
4310 tcg_temp_free_i32(tmp32_1
);
4311 tcg_temp_free_i32(tmp32_2
);
4313 case 0x54: /* N R1,D2(X2,B2) [RX] */
4314 case 0x56: /* O R1,D2(X2,B2) [RX] */
4315 case 0x57: /* X R1,D2(X2,B2) [RX] */
4316 insn
= ld_code4(s
->pc
);
4317 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4318 tmp2
= tcg_temp_new_i64();
4319 tmp32_1
= load_reg32(r1
);
4320 tmp32_2
= tcg_temp_new_i32();
4321 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4322 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4323 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
4324 store_reg32(r1
, tmp32_1
);
4325 set_cc_nz_u32(s
, tmp32_1
);
4326 tcg_temp_free_i64(tmp
);
4327 tcg_temp_free_i64(tmp2
);
4328 tcg_temp_free_i32(tmp32_1
);
4329 tcg_temp_free_i32(tmp32_2
);
4331 case 0x58: /* l r1, d2(x2, b2) */
4332 insn
= ld_code4(s
->pc
);
4333 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4334 tmp2
= tcg_temp_new_i64();
4335 tmp32_1
= tcg_temp_new_i32();
4336 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4337 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4338 store_reg32(r1
, tmp32_1
);
4339 tcg_temp_free_i64(tmp
);
4340 tcg_temp_free_i64(tmp2
);
4341 tcg_temp_free_i32(tmp32_1
);
4343 case 0x59: /* C R1,D2(X2,B2) [RX] */
4344 insn
= ld_code4(s
->pc
);
4345 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4346 tmp2
= tcg_temp_new_i64();
4347 tmp32_1
= tcg_temp_new_i32();
4348 tmp32_2
= load_reg32(r1
);
4349 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4350 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4351 cmp_s32(s
, tmp32_2
, tmp32_1
);
4352 tcg_temp_free_i64(tmp
);
4353 tcg_temp_free_i64(tmp2
);
4354 tcg_temp_free_i32(tmp32_1
);
4355 tcg_temp_free_i32(tmp32_2
);
4357 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4358 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4359 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4360 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4361 insn
= ld_code4(s
->pc
);
4362 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4363 tmp32_1
= load_reg32(r1
);
4364 tmp32_2
= tcg_temp_new_i32();
4365 tmp32_3
= tcg_temp_new_i32();
4366 tcg_gen_qemu_ld32s(tmp
, tmp
, get_mem_index(s
));
4367 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4371 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4375 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4380 store_reg32(r1
, tmp32_3
);
4383 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4386 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4389 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4392 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4397 tcg_temp_free_i64(tmp
);
4398 tcg_temp_free_i32(tmp32_1
);
4399 tcg_temp_free_i32(tmp32_2
);
4400 tcg_temp_free_i32(tmp32_3
);
4402 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4403 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4404 insn
= ld_code4(s
->pc
);
4405 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4406 tmp2
= tcg_temp_new_i64();
4407 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4408 tmp3
= load_reg((r1
+ 1) & 15);
4409 tcg_gen_ext32s_i64(tmp2
, tmp2
);
4410 tcg_gen_ext32s_i64(tmp3
, tmp3
);
4411 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
4412 store_reg32_i64((r1
+ 1) & 15, tmp2
);
4413 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
4414 store_reg32_i64(r1
, tmp2
);
4415 tcg_temp_free_i64(tmp
);
4416 tcg_temp_free_i64(tmp2
);
4417 tcg_temp_free_i64(tmp3
);
4419 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4420 insn
= ld_code4(s
->pc
);
4421 tmp3
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4422 tmp32_1
= load_reg32(r1
);
4423 tmp32_2
= load_reg32(r1
+ 1);
4425 tmp
= tcg_temp_new_i64();
4426 tmp2
= tcg_temp_new_i64();
4428 /* dividend is r(r1 << 32) | r(r1 + 1) */
4429 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4430 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
4431 tcg_gen_shli_i64(tmp
, tmp
, 32);
4432 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
4434 /* divisor is in memory */
4435 tcg_gen_qemu_ld32s(tmp2
, tmp3
, get_mem_index(s
));
4437 /* XXX divisor == 0 -> FixP divide exception */
4439 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
4440 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
4442 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4443 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
4445 store_reg32(r1
, tmp32_1
); /* remainder */
4446 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
4447 tcg_temp_free_i32(tmp32_1
);
4448 tcg_temp_free_i32(tmp32_2
);
4449 tcg_temp_free_i64(tmp
);
4450 tcg_temp_free_i64(tmp2
);
4451 tcg_temp_free_i64(tmp3
);
4453 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4454 insn
= ld_code4(s
->pc
);
4455 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4456 tmp2
= load_freg(r1
);
4457 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4458 tcg_temp_free_i64(tmp
);
4459 tcg_temp_free_i64(tmp2
);
4461 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4462 insn
= ld_code4(s
->pc
);
4463 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4464 tmp2
= tcg_temp_new_i64();
4465 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
4466 store_freg(r1
, tmp2
);
4467 tcg_temp_free_i64(tmp
);
4468 tcg_temp_free_i64(tmp2
);
4470 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4471 insn
= ld_code4(s
->pc
);
4472 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4473 tmp2
= tcg_temp_new_i64();
4474 tmp32_1
= load_freg32(r1
);
4475 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
4476 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4477 tcg_temp_free_i64(tmp
);
4478 tcg_temp_free_i64(tmp2
);
4479 tcg_temp_free_i32(tmp32_1
);
4481 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4482 insn
= ld_code4(s
->pc
);
4483 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4484 tmp2
= tcg_temp_new_i64();
4485 tmp32_1
= load_reg32(r1
);
4486 tmp32_2
= tcg_temp_new_i32();
4487 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4488 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4489 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4490 store_reg32(r1
, tmp32_1
);
4491 tcg_temp_free_i64(tmp
);
4492 tcg_temp_free_i64(tmp2
);
4493 tcg_temp_free_i32(tmp32_1
);
4494 tcg_temp_free_i32(tmp32_2
);
4496 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4497 insn
= ld_code4(s
->pc
);
4498 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4499 tmp2
= tcg_temp_new_i64();
4500 tmp32_1
= tcg_temp_new_i32();
4501 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4502 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4503 store_freg32(r1
, tmp32_1
);
4504 tcg_temp_free_i64(tmp
);
4505 tcg_temp_free_i64(tmp2
);
4506 tcg_temp_free_i32(tmp32_1
);
4508 #ifndef CONFIG_USER_ONLY
4509 case 0x80: /* SSM D2(B2) [S] */
4510 /* Set System Mask */
4511 check_privileged(s
, ilc
);
4512 insn
= ld_code4(s
->pc
);
4513 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4514 tmp
= get_address(s
, 0, b2
, d2
);
4515 tmp2
= tcg_temp_new_i64();
4516 tmp3
= tcg_temp_new_i64();
4517 tcg_gen_andi_i64(tmp3
, psw_mask
, ~0xff00000000000000ULL
);
4518 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4519 tcg_gen_shli_i64(tmp2
, tmp2
, 56);
4520 tcg_gen_or_i64(psw_mask
, tmp3
, tmp2
);
4521 tcg_temp_free_i64(tmp
);
4522 tcg_temp_free_i64(tmp2
);
4523 tcg_temp_free_i64(tmp3
);
4525 case 0x82: /* LPSW D2(B2) [S] */
4527 check_privileged(s
, ilc
);
4528 insn
= ld_code4(s
->pc
);
4529 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4530 tmp
= get_address(s
, 0, b2
, d2
);
4531 tmp2
= tcg_temp_new_i64();
4532 tmp3
= tcg_temp_new_i64();
4533 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4534 tcg_gen_addi_i64(tmp
, tmp
, 4);
4535 tcg_gen_qemu_ld32u(tmp3
, tmp
, get_mem_index(s
));
4536 gen_helper_load_psw(tmp2
, tmp3
);
4537 tcg_temp_free_i64(tmp
);
4538 tcg_temp_free_i64(tmp2
);
4539 tcg_temp_free_i64(tmp3
);
4540 /* we need to keep cc_op intact */
4541 s
->is_jmp
= DISAS_JUMP
;
4543 case 0x83: /* DIAG R1,R3,D2 [RS] */
4544 /* Diagnose call (KVM hypercall) */
4545 check_privileged(s
, ilc
);
4546 potential_page_fault(s
);
4547 insn
= ld_code4(s
->pc
);
4548 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4549 tmp32_1
= tcg_const_i32(insn
& 0xfff);
4552 gen_helper_diag(tmp2
, tmp32_1
, tmp2
, tmp3
);
4554 tcg_temp_free_i32(tmp32_1
);
4555 tcg_temp_free_i64(tmp2
);
4556 tcg_temp_free_i64(tmp3
);
4559 case 0x88: /* SRL R1,D2(B2) [RS] */
4560 case 0x89: /* SLL R1,D2(B2) [RS] */
4561 case 0x8a: /* SRA R1,D2(B2) [RS] */
4562 insn
= ld_code4(s
->pc
);
4563 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4564 tmp
= get_address(s
, 0, b2
, d2
);
4565 tmp32_1
= load_reg32(r1
);
4566 tmp32_2
= tcg_temp_new_i32();
4567 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4568 tcg_gen_andi_i32(tmp32_2
, tmp32_2
, 0x3f);
4571 tcg_gen_shr_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4574 tcg_gen_shl_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4577 tcg_gen_sar_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4578 set_cc_s32(s
, tmp32_1
);
4583 store_reg32(r1
, tmp32_1
);
4584 tcg_temp_free_i64(tmp
);
4585 tcg_temp_free_i32(tmp32_1
);
4586 tcg_temp_free_i32(tmp32_2
);
4588 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4589 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4590 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4591 insn
= ld_code4(s
->pc
);
4592 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4593 tmp
= get_address(s
, 0, b2
, d2
); /* shift */
4594 tmp2
= tcg_temp_new_i64();
4595 tmp32_1
= load_reg32(r1
);
4596 tmp32_2
= load_reg32(r1
+ 1);
4597 tcg_gen_concat_i32_i64(tmp2
, tmp32_2
, tmp32_1
); /* operand */
4600 tcg_gen_shr_i64(tmp2
, tmp2
, tmp
);
4603 tcg_gen_shl_i64(tmp2
, tmp2
, tmp
);
4606 tcg_gen_sar_i64(tmp2
, tmp2
, tmp
);
4607 set_cc_s64(s
, tmp2
);
4610 tcg_gen_shri_i64(tmp
, tmp2
, 32);
4611 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4612 store_reg32(r1
, tmp32_1
);
4613 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4614 store_reg32(r1
+ 1, tmp32_2
);
4615 tcg_temp_free_i64(tmp
);
4616 tcg_temp_free_i64(tmp2
);
4618 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4619 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4620 insn
= ld_code4(s
->pc
);
4621 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4623 tmp
= get_address(s
, 0, b2
, d2
);
4624 tmp2
= tcg_temp_new_i64();
4625 tmp3
= tcg_const_i64(4);
4626 tmp4
= tcg_const_i64(0xffffffff00000000ULL
);
4627 for (i
= r1
;; i
= (i
+ 1) % 16) {
4629 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4630 tcg_gen_and_i64(regs
[i
], regs
[i
], tmp4
);
4631 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
4633 tcg_gen_qemu_st32(regs
[i
], tmp
, get_mem_index(s
));
4638 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
4640 tcg_temp_free_i64(tmp
);
4641 tcg_temp_free_i64(tmp2
);
4642 tcg_temp_free_i64(tmp3
);
4643 tcg_temp_free_i64(tmp4
);
4645 case 0x91: /* TM D1(B1),I2 [SI] */
4646 insn
= ld_code4(s
->pc
);
4647 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4648 tmp2
= tcg_const_i64(i2
);
4649 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
4650 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
4651 tcg_temp_free_i64(tmp
);
4652 tcg_temp_free_i64(tmp2
);
4654 case 0x92: /* MVI D1(B1),I2 [SI] */
4655 insn
= ld_code4(s
->pc
);
4656 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4657 tmp2
= tcg_const_i64(i2
);
4658 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4659 tcg_temp_free_i64(tmp
);
4660 tcg_temp_free_i64(tmp2
);
4662 case 0x94: /* NI D1(B1),I2 [SI] */
4663 case 0x96: /* OI D1(B1),I2 [SI] */
4664 case 0x97: /* XI D1(B1),I2 [SI] */
4665 insn
= ld_code4(s
->pc
);
4666 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4667 tmp2
= tcg_temp_new_i64();
4668 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4671 tcg_gen_andi_i64(tmp2
, tmp2
, i2
);
4674 tcg_gen_ori_i64(tmp2
, tmp2
, i2
);
4677 tcg_gen_xori_i64(tmp2
, tmp2
, i2
);
4682 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4683 set_cc_nz_u64(s
, tmp2
);
4684 tcg_temp_free_i64(tmp
);
4685 tcg_temp_free_i64(tmp2
);
4687 case 0x95: /* CLI D1(B1),I2 [SI] */
4688 insn
= ld_code4(s
->pc
);
4689 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4690 tmp2
= tcg_temp_new_i64();
4691 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4692 cmp_u64c(s
, tmp2
, i2
);
4693 tcg_temp_free_i64(tmp
);
4694 tcg_temp_free_i64(tmp2
);
4696 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4697 insn
= ld_code4(s
->pc
);
4698 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4699 tmp
= get_address(s
, 0, b2
, d2
);
4700 tmp32_1
= tcg_const_i32(r1
);
4701 tmp32_2
= tcg_const_i32(r3
);
4702 potential_page_fault(s
);
4703 gen_helper_lam(tmp32_1
, tmp
, tmp32_2
);
4704 tcg_temp_free_i64(tmp
);
4705 tcg_temp_free_i32(tmp32_1
);
4706 tcg_temp_free_i32(tmp32_2
);
4708 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4709 insn
= ld_code4(s
->pc
);
4710 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4711 tmp
= get_address(s
, 0, b2
, d2
);
4712 tmp32_1
= tcg_const_i32(r1
);
4713 tmp32_2
= tcg_const_i32(r3
);
4714 potential_page_fault(s
);
4715 gen_helper_stam(tmp32_1
, tmp
, tmp32_2
);
4716 tcg_temp_free_i64(tmp
);
4717 tcg_temp_free_i32(tmp32_1
);
4718 tcg_temp_free_i32(tmp32_2
);
4721 insn
= ld_code4(s
->pc
);
4722 r1
= (insn
>> 20) & 0xf;
4723 op
= (insn
>> 16) & 0xf;
4725 disas_a5(s
, op
, r1
, i2
);
4728 insn
= ld_code4(s
->pc
);
4729 r1
= (insn
>> 20) & 0xf;
4730 op
= (insn
>> 16) & 0xf;
4732 disas_a7(s
, op
, r1
, i2
);
4734 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4735 insn
= ld_code4(s
->pc
);
4736 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4737 tmp
= get_address(s
, 0, b2
, d2
);
4738 tmp32_1
= tcg_const_i32(r1
);
4739 tmp32_2
= tcg_const_i32(r3
);
4740 potential_page_fault(s
);
4741 gen_helper_mvcle(cc_op
, tmp32_1
, tmp
, tmp32_2
);
4743 tcg_temp_free_i64(tmp
);
4744 tcg_temp_free_i32(tmp32_1
);
4745 tcg_temp_free_i32(tmp32_2
);
4747 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4748 insn
= ld_code4(s
->pc
);
4749 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4750 tmp
= get_address(s
, 0, b2
, d2
);
4751 tmp32_1
= tcg_const_i32(r1
);
4752 tmp32_2
= tcg_const_i32(r3
);
4753 potential_page_fault(s
);
4754 gen_helper_clcle(cc_op
, tmp32_1
, tmp
, tmp32_2
);
4756 tcg_temp_free_i64(tmp
);
4757 tcg_temp_free_i32(tmp32_1
);
4758 tcg_temp_free_i32(tmp32_2
);
4760 #ifndef CONFIG_USER_ONLY
4761 case 0xac: /* STNSM D1(B1),I2 [SI] */
4762 case 0xad: /* STOSM D1(B1),I2 [SI] */
4763 check_privileged(s
, ilc
);
4764 insn
= ld_code4(s
->pc
);
4765 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4766 tmp2
= tcg_temp_new_i64();
4767 tcg_gen_shri_i64(tmp2
, psw_mask
, 56);
4768 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4770 tcg_gen_andi_i64(psw_mask
, psw_mask
,
4771 ((uint64_t)i2
<< 56) | 0x00ffffffffffffffULL
);
4773 tcg_gen_ori_i64(psw_mask
, psw_mask
, (uint64_t)i2
<< 56);
4775 tcg_temp_free_i64(tmp
);
4776 tcg_temp_free_i64(tmp2
);
4778 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4779 check_privileged(s
, ilc
);
4780 insn
= ld_code4(s
->pc
);
4781 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4782 tmp
= get_address(s
, 0, b2
, d2
);
4783 tmp2
= load_reg(r3
);
4784 tmp32_1
= tcg_const_i32(r1
);
4785 potential_page_fault(s
);
4786 gen_helper_sigp(cc_op
, tmp
, tmp32_1
, tmp2
);
4788 tcg_temp_free_i64(tmp
);
4789 tcg_temp_free_i64(tmp2
);
4790 tcg_temp_free_i32(tmp32_1
);
4792 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4793 check_privileged(s
, ilc
);
4794 insn
= ld_code4(s
->pc
);
4795 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4796 tmp32_1
= tcg_const_i32(r1
);
4797 potential_page_fault(s
);
4798 gen_helper_lra(cc_op
, tmp
, tmp32_1
);
4800 tcg_temp_free_i64(tmp
);
4801 tcg_temp_free_i32(tmp32_1
);
4805 insn
= ld_code4(s
->pc
);
4806 op
= (insn
>> 16) & 0xff;
4808 case 0x9c: /* STFPC D2(B2) [S] */
4810 b2
= (insn
>> 12) & 0xf;
4811 tmp32_1
= tcg_temp_new_i32();
4812 tmp
= tcg_temp_new_i64();
4813 tmp2
= get_address(s
, 0, b2
, d2
);
4814 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
4815 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4816 tcg_gen_qemu_st32(tmp
, tmp2
, get_mem_index(s
));
4817 tcg_temp_free_i32(tmp32_1
);
4818 tcg_temp_free_i64(tmp
);
4819 tcg_temp_free_i64(tmp2
);
4822 disas_b2(s
, op
, insn
);
4827 insn
= ld_code4(s
->pc
);
4828 op
= (insn
>> 16) & 0xff;
4829 r3
= (insn
>> 12) & 0xf; /* aka m3 */
4830 r1
= (insn
>> 4) & 0xf;
4832 disas_b3(s
, op
, r3
, r1
, r2
);
4834 #ifndef CONFIG_USER_ONLY
4835 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4837 check_privileged(s
, ilc
);
4838 insn
= ld_code4(s
->pc
);
4839 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4840 tmp
= get_address(s
, 0, b2
, d2
);
4841 tmp32_1
= tcg_const_i32(r1
);
4842 tmp32_2
= tcg_const_i32(r3
);
4843 potential_page_fault(s
);
4844 gen_helper_stctl(tmp32_1
, tmp
, tmp32_2
);
4845 tcg_temp_free_i64(tmp
);
4846 tcg_temp_free_i32(tmp32_1
);
4847 tcg_temp_free_i32(tmp32_2
);
4849 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4851 check_privileged(s
, ilc
);
4852 insn
= ld_code4(s
->pc
);
4853 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4854 tmp
= get_address(s
, 0, b2
, d2
);
4855 tmp32_1
= tcg_const_i32(r1
);
4856 tmp32_2
= tcg_const_i32(r3
);
4857 potential_page_fault(s
);
4858 gen_helper_lctl(tmp32_1
, tmp
, tmp32_2
);
4859 tcg_temp_free_i64(tmp
);
4860 tcg_temp_free_i32(tmp32_1
);
4861 tcg_temp_free_i32(tmp32_2
);
4865 insn
= ld_code4(s
->pc
);
4866 r1
= (insn
>> 4) & 0xf;
4868 op
= (insn
>> 16) & 0xff;
4869 disas_b9(s
, op
, r1
, r2
);
4871 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4872 insn
= ld_code4(s
->pc
);
4873 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4874 tmp
= get_address(s
, 0, b2
, d2
);
4875 tmp32_1
= tcg_const_i32(r1
);
4876 tmp32_2
= tcg_const_i32(r3
);
4877 potential_page_fault(s
);
4878 gen_helper_cs(cc_op
, tmp32_1
, tmp
, tmp32_2
);
4880 tcg_temp_free_i64(tmp
);
4881 tcg_temp_free_i32(tmp32_1
);
4882 tcg_temp_free_i32(tmp32_2
);
4884 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4885 insn
= ld_code4(s
->pc
);
4886 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4887 tmp
= get_address(s
, 0, b2
, d2
);
4888 tmp32_1
= load_reg32(r1
);
4889 tmp32_2
= tcg_const_i32(r3
);
4890 potential_page_fault(s
);
4891 gen_helper_clm(cc_op
, tmp32_1
, tmp32_2
, tmp
);
4893 tcg_temp_free_i64(tmp
);
4894 tcg_temp_free_i32(tmp32_1
);
4895 tcg_temp_free_i32(tmp32_2
);
4897 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4898 insn
= ld_code4(s
->pc
);
4899 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4900 tmp
= get_address(s
, 0, b2
, d2
);
4901 tmp32_1
= load_reg32(r1
);
4902 tmp32_2
= tcg_const_i32(r3
);
4903 potential_page_fault(s
);
4904 gen_helper_stcm(tmp32_1
, tmp32_2
, tmp
);
4905 tcg_temp_free_i64(tmp
);
4906 tcg_temp_free_i32(tmp32_1
);
4907 tcg_temp_free_i32(tmp32_2
);
4909 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4910 insn
= ld_code4(s
->pc
);
4911 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4913 /* effectively a 32-bit load */
4914 tmp
= get_address(s
, 0, b2
, d2
);
4915 tmp32_1
= tcg_temp_new_i32();
4916 tmp32_2
= tcg_const_i32(r3
);
4917 tcg_gen_qemu_ld32u(tmp
, tmp
, get_mem_index(s
));
4918 store_reg32_i64(r1
, tmp
);
4919 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4920 set_cc_icm(s
, tmp32_2
, tmp32_1
);
4921 tcg_temp_free_i64(tmp
);
4922 tcg_temp_free_i32(tmp32_1
);
4923 tcg_temp_free_i32(tmp32_2
);
4925 uint32_t mask
= 0x00ffffffUL
;
4926 uint32_t shift
= 24;
4928 tmp
= get_address(s
, 0, b2
, d2
);
4929 tmp2
= tcg_temp_new_i64();
4930 tmp32_1
= load_reg32(r1
);
4931 tmp32_2
= tcg_temp_new_i32();
4932 tmp32_3
= tcg_const_i32(r3
);
4933 tmp32_4
= tcg_const_i32(0);
4936 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4937 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4939 tcg_gen_shli_i32(tmp32_2
, tmp32_2
, shift
);
4941 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, mask
);
4942 tcg_gen_or_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4943 tcg_gen_or_i32(tmp32_4
, tmp32_4
, tmp32_2
);
4944 tcg_gen_addi_i64(tmp
, tmp
, 1);
4946 m3
= (m3
<< 1) & 0xf;
4947 mask
= (mask
>> 8) | 0xff000000UL
;
4950 store_reg32(r1
, tmp32_1
);
4951 set_cc_icm(s
, tmp32_3
, tmp32_4
);
4952 tcg_temp_free_i64(tmp
);
4953 tcg_temp_free_i64(tmp2
);
4954 tcg_temp_free_i32(tmp32_1
);
4955 tcg_temp_free_i32(tmp32_2
);
4956 tcg_temp_free_i32(tmp32_3
);
4957 tcg_temp_free_i32(tmp32_4
);
4959 /* i.e. env->cc = 0 */
4960 gen_op_movi_cc(s
, 0);
4965 insn
= ld_code6(s
->pc
);
4966 r1
= (insn
>> 36) & 0xf;
4967 op
= (insn
>> 32) & 0xf;
4971 disas_c0(s
, op
, r1
, i2
);
4974 disas_c2(s
, op
, r1
, i2
);
4980 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4981 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4982 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4983 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4984 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4985 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4986 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4987 insn
= ld_code6(s
->pc
);
4988 vl
= tcg_const_i32((insn
>> 32) & 0xff);
4989 b1
= (insn
>> 28) & 0xf;
4990 b2
= (insn
>> 12) & 0xf;
4991 d1
= (insn
>> 16) & 0xfff;
4993 tmp
= get_address(s
, 0, b1
, d1
);
4994 tmp2
= get_address(s
, 0, b2
, d2
);
4997 gen_op_mvc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
5000 potential_page_fault(s
);
5001 gen_helper_nc(cc_op
, vl
, tmp
, tmp2
);
5005 gen_op_clc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
5008 potential_page_fault(s
);
5009 gen_helper_oc(cc_op
, vl
, tmp
, tmp2
);
5013 potential_page_fault(s
);
5014 gen_helper_xc(cc_op
, vl
, tmp
, tmp2
);
5018 potential_page_fault(s
);
5019 gen_helper_tr(vl
, tmp
, tmp2
);
5023 potential_page_fault(s
);
5024 gen_helper_unpk(vl
, tmp
, tmp2
);
5029 tcg_temp_free_i64(tmp
);
5030 tcg_temp_free_i64(tmp2
);
5032 #ifndef CONFIG_USER_ONLY
5033 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5034 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5035 check_privileged(s
, ilc
);
5036 potential_page_fault(s
);
5037 insn
= ld_code6(s
->pc
);
5038 r1
= (insn
>> 36) & 0xf;
5039 r3
= (insn
>> 32) & 0xf;
5040 b1
= (insn
>> 28) & 0xf;
5041 d1
= (insn
>> 16) & 0xfff;
5042 b2
= (insn
>> 12) & 0xf;
5046 tmp2
= get_address(s
, 0, b1
, d1
);
5047 tmp3
= get_address(s
, 0, b2
, d2
);
5049 gen_helper_mvcp(cc_op
, tmp
, tmp2
, tmp3
);
5051 gen_helper_mvcs(cc_op
, tmp
, tmp2
, tmp3
);
5054 tcg_temp_free_i64(tmp
);
5055 tcg_temp_free_i64(tmp2
);
5056 tcg_temp_free_i64(tmp3
);
5060 insn
= ld_code6(s
->pc
);
5063 r1
= (insn
>> 36) & 0xf;
5064 x2
= (insn
>> 32) & 0xf;
5065 b2
= (insn
>> 28) & 0xf;
5066 d2
= ((int)((((insn
>> 16) & 0xfff)
5067 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
5068 disas_e3(s
, op
, r1
, x2
, b2
, d2
);
5070 #ifndef CONFIG_USER_ONLY
5072 /* Test Protection */
5073 check_privileged(s
, ilc
);
5074 insn
= ld_code6(s
->pc
);
5080 insn
= ld_code6(s
->pc
);
5083 r1
= (insn
>> 36) & 0xf;
5084 r3
= (insn
>> 32) & 0xf;
5085 b2
= (insn
>> 28) & 0xf;
5086 d2
= ((int)((((insn
>> 16) & 0xfff)
5087 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
5088 disas_eb(s
, op
, r1
, r3
, b2
, d2
);
5091 insn
= ld_code6(s
->pc
);
5094 r1
= (insn
>> 36) & 0xf;
5095 x2
= (insn
>> 32) & 0xf;
5096 b2
= (insn
>> 28) & 0xf;
5097 d2
= (short)((insn
>> 16) & 0xfff);
5098 r1b
= (insn
>> 12) & 0xf;
5099 disas_ed(s
, op
, r1
, x2
, b2
, d2
, r1b
);
5102 LOG_DISAS("unimplemented opcode 0x%x\n", opc
);
5103 gen_illegal_opcode(s
, ilc
);
5107 /* Instruction length is encoded in the opcode */
5111 static inline void gen_intermediate_code_internal(CPUState
*env
,
5112 TranslationBlock
*tb
,
5116 target_ulong pc_start
;
5117 uint64_t next_page_start
;
5118 uint16_t *gen_opc_end
;
5120 int num_insns
, max_insns
;
5126 if (!(tb
->flags
& FLAG_MASK_64
)) {
5127 pc_start
&= 0x7fffffff;
5131 dc
.is_jmp
= DISAS_NEXT
;
5133 dc
.cc_op
= CC_OP_DYNAMIC
;
5135 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
5137 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
5140 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5141 if (max_insns
== 0) {
5142 max_insns
= CF_COUNT_MASK
;
5148 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5149 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5150 if (bp
->pc
== dc
.pc
) {
5157 j
= gen_opc_ptr
- gen_opc_buf
;
5161 gen_opc_instr_start
[lj
++] = 0;
5164 gen_opc_pc
[lj
] = dc
.pc
;
5165 gen_opc_cc_op
[lj
] = dc
.cc_op
;
5166 gen_opc_instr_start
[lj
] = 1;
5167 gen_opc_icount
[lj
] = num_insns
;
5169 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
5172 #if defined(S390X_DEBUG_DISAS_VERBOSE)
5173 LOG_DISAS("pc " TARGET_FMT_lx
"\n",
5176 disas_s390_insn(&dc
);
5179 if (env
->singlestep_enabled
) {
5182 } while (!dc
.is_jmp
&& gen_opc_ptr
< gen_opc_end
&& dc
.pc
< next_page_start
5183 && num_insns
< max_insns
&& !env
->singlestep_enabled
5187 update_psw_addr(&dc
);
5190 if (singlestep
&& dc
.cc_op
!= CC_OP_DYNAMIC
) {
5191 gen_op_calc_cc(&dc
);
5193 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5195 gen_op_set_cc_op(&dc
);
5198 if (tb
->cflags
& CF_LAST_IO
) {
5201 /* Generate the return instruction */
5202 if (dc
.is_jmp
!= DISAS_TB_JUMP
) {
5205 gen_icount_end(tb
, num_insns
);
5206 *gen_opc_ptr
= INDEX_op_end
;
5208 j
= gen_opc_ptr
- gen_opc_buf
;
5211 gen_opc_instr_start
[lj
++] = 0;
5214 tb
->size
= dc
.pc
- pc_start
;
5215 tb
->icount
= num_insns
;
5217 #if defined(S390X_DEBUG_DISAS)
5218 log_cpu_state_mask(CPU_LOG_TB_CPU
, env
, 0);
5219 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5220 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5221 log_target_disas(pc_start
, dc
.pc
- pc_start
, 1);
5227 void gen_intermediate_code (CPUState
*env
, struct TranslationBlock
*tb
)
5229 gen_intermediate_code_internal(env
, tb
, 0);
5232 void gen_intermediate_code_pc (CPUState
*env
, struct TranslationBlock
*tb
)
5234 gen_intermediate_code_internal(env
, tb
, 1);
5237 void restore_state_to_opc(CPUState
*env
, TranslationBlock
*tb
, int pc_pos
)
5240 env
->psw
.addr
= gen_opc_pc
[pc_pos
];
5241 cc_op
= gen_opc_cc_op
[pc_pos
];
5242 if ((cc_op
!= CC_OP_DYNAMIC
) && (cc_op
!= CC_OP_STATIC
)) {