4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
26 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
27 /* #define DEBUG_INLINE_BRANCHES */
28 #define S390X_DEBUG_DISAS
29 /* #define S390X_DEBUG_DISAS_VERBOSE */
31 #ifdef S390X_DEBUG_DISAS_VERBOSE
32 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
34 # define LOG_DISAS(...) do { } while (0)
43 /* global register indexes */
44 static TCGv_ptr cpu_env
;
46 #include "gen-icount.h"
51 typedef struct DisasContext DisasContext
;
56 struct TranslationBlock
*tb
;
61 static void gen_op_calc_cc(DisasContext
*s
);
63 #ifdef DEBUG_INLINE_BRANCHES
64 static uint64_t inline_branch_hit
[CC_OP_MAX
];
65 static uint64_t inline_branch_miss
[CC_OP_MAX
];
68 static inline void debug_insn(uint64_t insn
)
70 LOG_DISAS("insn: 0x%" PRIx64
"\n", insn
);
73 static inline uint64_t pc_to_link_info(DisasContext
*s
, uint64_t pc
)
75 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
76 if (s
->tb
->flags
& FLAG_MASK_32
) {
77 return pc
| 0x80000000;
83 void cpu_dump_state(CPUState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
88 for (i
= 0; i
< 16; i
++) {
89 cpu_fprintf(f
, "R%02d=%016" PRIx64
, i
, env
->regs
[i
]);
97 for (i
= 0; i
< 16; i
++) {
98 cpu_fprintf(f
, "F%02d=%016" PRIx64
, i
, *(uint64_t *)&env
->fregs
[i
]);
100 cpu_fprintf(f
, "\n");
106 cpu_fprintf(f
, "\n");
108 #ifndef CONFIG_USER_ONLY
109 for (i
= 0; i
< 16; i
++) {
110 cpu_fprintf(f
, "C%02d=%016" PRIx64
, i
, env
->cregs
[i
]);
112 cpu_fprintf(f
, "\n");
119 cpu_fprintf(f
, "\n");
121 if (env
->cc_op
> 3) {
122 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %15s\n",
123 env
->psw
.mask
, env
->psw
.addr
, cc_name(env
->cc_op
));
125 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %02x\n",
126 env
->psw
.mask
, env
->psw
.addr
, env
->cc_op
);
129 #ifdef DEBUG_INLINE_BRANCHES
130 for (i
= 0; i
< CC_OP_MAX
; i
++) {
131 cpu_fprintf(f
, " %15s = %10ld\t%10ld\n", cc_name(i
),
132 inline_branch_miss
[i
], inline_branch_hit
[i
]);
137 static TCGv_i64 psw_addr
;
138 static TCGv_i64 psw_mask
;
140 static TCGv_i32 cc_op
;
141 static TCGv_i64 cc_src
;
142 static TCGv_i64 cc_dst
;
143 static TCGv_i64 cc_vr
;
145 static char cpu_reg_names
[10*3 + 6*4];
146 static TCGv_i64 regs
[16];
148 static uint8_t gen_opc_cc_op
[OPC_BUF_SIZE
];
150 void s390x_translate_init(void)
153 size_t cpu_reg_names_size
= sizeof(cpu_reg_names
);
156 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
157 psw_addr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, psw
.addr
),
159 psw_mask
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, psw
.mask
),
162 cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
164 cc_src
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, cc_src
),
166 cc_dst
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
168 cc_vr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUState
, cc_vr
),
172 for (i
= 0; i
< 16; i
++) {
173 snprintf(p
, cpu_reg_names_size
, "r%d", i
);
174 regs
[i
] = tcg_global_mem_new(TCG_AREG0
,
175 offsetof(CPUState
, regs
[i
]), p
);
176 p
+= (i
< 10) ? 3 : 4;
177 cpu_reg_names_size
-= (i
< 10) ? 3 : 4;
181 static inline TCGv_i64
load_reg(int reg
)
183 TCGv_i64 r
= tcg_temp_new_i64();
184 tcg_gen_mov_i64(r
, regs
[reg
]);
188 static inline TCGv_i64
load_freg(int reg
)
190 TCGv_i64 r
= tcg_temp_new_i64();
191 tcg_gen_ld_i64(r
, cpu_env
, offsetof(CPUState
, fregs
[reg
].d
));
195 static inline TCGv_i32
load_freg32(int reg
)
197 TCGv_i32 r
= tcg_temp_new_i32();
198 tcg_gen_ld_i32(r
, cpu_env
, offsetof(CPUState
, fregs
[reg
].l
.upper
));
202 static inline TCGv_i32
load_reg32(int reg
)
204 TCGv_i32 r
= tcg_temp_new_i32();
205 tcg_gen_trunc_i64_i32(r
, regs
[reg
]);
209 static inline TCGv_i64
load_reg32_i64(int reg
)
211 TCGv_i64 r
= tcg_temp_new_i64();
212 tcg_gen_ext32s_i64(r
, regs
[reg
]);
216 static inline void store_reg(int reg
, TCGv_i64 v
)
218 tcg_gen_mov_i64(regs
[reg
], v
);
221 static inline void store_freg(int reg
, TCGv_i64 v
)
223 tcg_gen_st_i64(v
, cpu_env
, offsetof(CPUState
, fregs
[reg
].d
));
226 static inline void store_reg32(int reg
, TCGv_i32 v
)
228 #if HOST_LONG_BITS == 32
229 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), v
);
231 TCGv_i64 tmp
= tcg_temp_new_i64();
232 tcg_gen_extu_i32_i64(tmp
, v
);
233 /* 32 bit register writes keep the upper half */
234 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], tmp
, 0, 32);
235 tcg_temp_free_i64(tmp
);
239 static inline void store_reg32_i64(int reg
, TCGv_i64 v
)
241 /* 32 bit register writes keep the upper half */
242 #if HOST_LONG_BITS == 32
243 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), TCGV_LOW(v
));
245 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 32);
249 static inline void store_reg16(int reg
, TCGv_i32 v
)
251 TCGv_i64 tmp
= tcg_temp_new_i64();
252 tcg_gen_extu_i32_i64(tmp
, v
);
253 /* 16 bit register writes keep the upper bytes */
254 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], tmp
, 0, 16);
255 tcg_temp_free_i64(tmp
);
258 static inline void store_reg8(int reg
, TCGv_i64 v
)
260 /* 8 bit register writes keep the upper bytes */
261 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 8);
264 static inline void store_freg32(int reg
, TCGv_i32 v
)
266 tcg_gen_st_i32(v
, cpu_env
, offsetof(CPUState
, fregs
[reg
].l
.upper
));
269 static inline void update_psw_addr(DisasContext
*s
)
272 tcg_gen_movi_i64(psw_addr
, s
->pc
);
275 static inline void potential_page_fault(DisasContext
*s
)
277 #ifndef CONFIG_USER_ONLY
283 static inline uint64_t ld_code2(uint64_t pc
)
285 return (uint64_t)lduw_code(pc
);
288 static inline uint64_t ld_code4(uint64_t pc
)
290 return (uint64_t)ldl_code(pc
);
293 static inline uint64_t ld_code6(uint64_t pc
)
296 opc
= (uint64_t)lduw_code(pc
) << 32;
297 opc
|= (uint64_t)(uint32_t)ldl_code(pc
+2);
301 static inline int get_mem_index(DisasContext
*s
)
303 switch (s
->tb
->flags
& FLAG_MASK_ASC
) {
304 case PSW_ASC_PRIMARY
>> 32:
306 case PSW_ASC_SECONDARY
>> 32:
308 case PSW_ASC_HOME
>> 32:
316 static inline void gen_debug(DisasContext
*s
)
318 TCGv_i32 tmp
= tcg_const_i32(EXCP_DEBUG
);
321 gen_helper_exception(tmp
);
322 tcg_temp_free_i32(tmp
);
323 s
->is_jmp
= DISAS_EXCP
;
326 #ifdef CONFIG_USER_ONLY
328 static void gen_illegal_opcode(DisasContext
*s
, int ilc
)
330 TCGv_i32 tmp
= tcg_const_i32(EXCP_SPEC
);
333 gen_helper_exception(tmp
);
334 tcg_temp_free_i32(tmp
);
335 s
->is_jmp
= DISAS_EXCP
;
338 #else /* CONFIG_USER_ONLY */
340 static void debug_print_inst(DisasContext
*s
, int ilc
)
342 #ifdef DEBUG_ILLEGAL_INSTRUCTIONS
347 inst
= ld_code2(s
->pc
);
350 inst
= ld_code4(s
->pc
);
353 inst
= ld_code6(s
->pc
);
357 fprintf(stderr
, "Illegal instruction [%d at %016" PRIx64
"]: 0x%016"
358 PRIx64
"\n", ilc
, s
->pc
, inst
);
362 static void gen_program_exception(DisasContext
*s
, int ilc
, int code
)
366 debug_print_inst(s
, ilc
);
368 /* remember what pgm exeption this was */
369 tmp
= tcg_const_i32(code
);
370 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUState
, int_pgm_code
));
371 tcg_temp_free_i32(tmp
);
373 tmp
= tcg_const_i32(ilc
);
374 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUState
, int_pgm_ilc
));
375 tcg_temp_free_i32(tmp
);
377 /* advance past instruction */
384 /* trigger exception */
385 tmp
= tcg_const_i32(EXCP_PGM
);
386 gen_helper_exception(tmp
);
387 tcg_temp_free_i32(tmp
);
390 s
->is_jmp
= DISAS_EXCP
;
394 static void gen_illegal_opcode(DisasContext
*s
, int ilc
)
396 gen_program_exception(s
, ilc
, PGM_SPECIFICATION
);
399 static void gen_privileged_exception(DisasContext
*s
, int ilc
)
401 gen_program_exception(s
, ilc
, PGM_PRIVILEGED
);
404 static void check_privileged(DisasContext
*s
, int ilc
)
406 if (s
->tb
->flags
& (PSW_MASK_PSTATE
>> 32)) {
407 gen_privileged_exception(s
, ilc
);
411 #endif /* CONFIG_USER_ONLY */
413 static TCGv_i64
get_address(DisasContext
*s
, int x2
, int b2
, int d2
)
417 /* 31-bitify the immediate part; register contents are dealt with below */
418 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
424 tmp
= tcg_const_i64(d2
);
425 tcg_gen_add_i64(tmp
, tmp
, regs
[x2
]);
430 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
434 tmp
= tcg_const_i64(d2
);
435 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
440 tmp
= tcg_const_i64(d2
);
443 /* 31-bit mode mask if there are values loaded from registers */
444 if (!(s
->tb
->flags
& FLAG_MASK_64
) && (x2
|| b2
)) {
445 tcg_gen_andi_i64(tmp
, tmp
, 0x7fffffffUL
);
451 static void gen_op_movi_cc(DisasContext
*s
, uint32_t val
)
453 s
->cc_op
= CC_OP_CONST0
+ val
;
456 static void gen_op_update1_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 dst
)
458 tcg_gen_discard_i64(cc_src
);
459 tcg_gen_mov_i64(cc_dst
, dst
);
460 tcg_gen_discard_i64(cc_vr
);
464 static void gen_op_update1_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 dst
)
466 tcg_gen_discard_i64(cc_src
);
467 tcg_gen_extu_i32_i64(cc_dst
, dst
);
468 tcg_gen_discard_i64(cc_vr
);
472 static void gen_op_update2_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
475 tcg_gen_mov_i64(cc_src
, src
);
476 tcg_gen_mov_i64(cc_dst
, dst
);
477 tcg_gen_discard_i64(cc_vr
);
481 static void gen_op_update2_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
484 tcg_gen_extu_i32_i64(cc_src
, src
);
485 tcg_gen_extu_i32_i64(cc_dst
, dst
);
486 tcg_gen_discard_i64(cc_vr
);
490 static void gen_op_update3_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
491 TCGv_i64 dst
, TCGv_i64 vr
)
493 tcg_gen_mov_i64(cc_src
, src
);
494 tcg_gen_mov_i64(cc_dst
, dst
);
495 tcg_gen_mov_i64(cc_vr
, vr
);
499 static void gen_op_update3_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
500 TCGv_i32 dst
, TCGv_i32 vr
)
502 tcg_gen_extu_i32_i64(cc_src
, src
);
503 tcg_gen_extu_i32_i64(cc_dst
, dst
);
504 tcg_gen_extu_i32_i64(cc_vr
, vr
);
508 static inline void set_cc_nz_u32(DisasContext
*s
, TCGv_i32 val
)
510 gen_op_update1_cc_i32(s
, CC_OP_NZ
, val
);
513 static inline void set_cc_nz_u64(DisasContext
*s
, TCGv_i64 val
)
515 gen_op_update1_cc_i64(s
, CC_OP_NZ
, val
);
518 static inline void cmp_32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
521 gen_op_update2_cc_i32(s
, cond
, v1
, v2
);
524 static inline void cmp_64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
527 gen_op_update2_cc_i64(s
, cond
, v1
, v2
);
530 static inline void cmp_s32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
532 cmp_32(s
, v1
, v2
, CC_OP_LTGT_32
);
535 static inline void cmp_u32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
537 cmp_32(s
, v1
, v2
, CC_OP_LTUGTU_32
);
540 static inline void cmp_s32c(DisasContext
*s
, TCGv_i32 v1
, int32_t v2
)
542 /* XXX optimize for the constant? put it in s? */
543 TCGv_i32 tmp
= tcg_const_i32(v2
);
544 cmp_32(s
, v1
, tmp
, CC_OP_LTGT_32
);
545 tcg_temp_free_i32(tmp
);
548 static inline void cmp_u32c(DisasContext
*s
, TCGv_i32 v1
, uint32_t v2
)
550 TCGv_i32 tmp
= tcg_const_i32(v2
);
551 cmp_32(s
, v1
, tmp
, CC_OP_LTUGTU_32
);
552 tcg_temp_free_i32(tmp
);
555 static inline void cmp_s64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
557 cmp_64(s
, v1
, v2
, CC_OP_LTGT_64
);
560 static inline void cmp_u64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
562 cmp_64(s
, v1
, v2
, CC_OP_LTUGTU_64
);
565 static inline void cmp_s64c(DisasContext
*s
, TCGv_i64 v1
, int64_t v2
)
567 TCGv_i64 tmp
= tcg_const_i64(v2
);
569 tcg_temp_free_i64(tmp
);
572 static inline void cmp_u64c(DisasContext
*s
, TCGv_i64 v1
, uint64_t v2
)
574 TCGv_i64 tmp
= tcg_const_i64(v2
);
576 tcg_temp_free_i64(tmp
);
579 static inline void set_cc_s32(DisasContext
*s
, TCGv_i32 val
)
581 gen_op_update1_cc_i32(s
, CC_OP_LTGT0_32
, val
);
584 static inline void set_cc_s64(DisasContext
*s
, TCGv_i64 val
)
586 gen_op_update1_cc_i64(s
, CC_OP_LTGT0_64
, val
);
589 static void set_cc_add64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
, TCGv_i64 vr
)
591 gen_op_update3_cc_i64(s
, CC_OP_ADD_64
, v1
, v2
, vr
);
594 static void set_cc_addu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
597 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, v1
, v2
, vr
);
600 static void set_cc_sub64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
, TCGv_i64 vr
)
602 gen_op_update3_cc_i64(s
, CC_OP_SUB_64
, v1
, v2
, vr
);
605 static void set_cc_subu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
608 gen_op_update3_cc_i64(s
, CC_OP_SUBU_64
, v1
, v2
, vr
);
611 static void set_cc_abs64(DisasContext
*s
, TCGv_i64 v1
)
613 gen_op_update1_cc_i64(s
, CC_OP_ABS_64
, v1
);
616 static void set_cc_nabs64(DisasContext
*s
, TCGv_i64 v1
)
618 gen_op_update1_cc_i64(s
, CC_OP_NABS_64
, v1
);
621 static void set_cc_add32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
, TCGv_i32 vr
)
623 gen_op_update3_cc_i32(s
, CC_OP_ADD_32
, v1
, v2
, vr
);
626 static void set_cc_addu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
629 gen_op_update3_cc_i32(s
, CC_OP_ADDU_32
, v1
, v2
, vr
);
632 static void set_cc_sub32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
, TCGv_i32 vr
)
634 gen_op_update3_cc_i32(s
, CC_OP_SUB_32
, v1
, v2
, vr
);
637 static void set_cc_subu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
640 gen_op_update3_cc_i32(s
, CC_OP_SUBU_32
, v1
, v2
, vr
);
643 static void set_cc_abs32(DisasContext
*s
, TCGv_i32 v1
)
645 gen_op_update1_cc_i32(s
, CC_OP_ABS_32
, v1
);
648 static void set_cc_nabs32(DisasContext
*s
, TCGv_i32 v1
)
650 gen_op_update1_cc_i32(s
, CC_OP_NABS_32
, v1
);
653 static void set_cc_comp32(DisasContext
*s
, TCGv_i32 v1
)
655 gen_op_update1_cc_i32(s
, CC_OP_COMP_32
, v1
);
658 static void set_cc_comp64(DisasContext
*s
, TCGv_i64 v1
)
660 gen_op_update1_cc_i64(s
, CC_OP_COMP_64
, v1
);
663 static void set_cc_icm(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
665 gen_op_update2_cc_i32(s
, CC_OP_ICM
, v1
, v2
);
668 static void set_cc_cmp_f32_i64(DisasContext
*s
, TCGv_i32 v1
, TCGv_i64 v2
)
670 tcg_gen_extu_i32_i64(cc_src
, v1
);
671 tcg_gen_mov_i64(cc_dst
, v2
);
672 tcg_gen_discard_i64(cc_vr
);
673 s
->cc_op
= CC_OP_LTGT_F32
;
676 static void set_cc_nz_f32(DisasContext
*s
, TCGv_i32 v1
)
678 gen_op_update1_cc_i32(s
, CC_OP_NZ_F32
, v1
);
681 static inline void set_cc_nz_f64(DisasContext
*s
, TCGv_i64 v1
)
683 gen_op_update1_cc_i64(s
, CC_OP_NZ_F64
, v1
);
686 /* CC value is in env->cc_op */
687 static inline void set_cc_static(DisasContext
*s
)
689 tcg_gen_discard_i64(cc_src
);
690 tcg_gen_discard_i64(cc_dst
);
691 tcg_gen_discard_i64(cc_vr
);
692 s
->cc_op
= CC_OP_STATIC
;
695 static inline void gen_op_set_cc_op(DisasContext
*s
)
697 if (s
->cc_op
!= CC_OP_DYNAMIC
&& s
->cc_op
!= CC_OP_STATIC
) {
698 tcg_gen_movi_i32(cc_op
, s
->cc_op
);
702 static inline void gen_update_cc_op(DisasContext
*s
)
707 /* calculates cc into cc_op */
708 static void gen_op_calc_cc(DisasContext
*s
)
710 TCGv_i32 local_cc_op
= tcg_const_i32(s
->cc_op
);
711 TCGv_i64 dummy
= tcg_const_i64(0);
718 /* s->cc_op is the cc value */
719 tcg_gen_movi_i32(cc_op
, s
->cc_op
- CC_OP_CONST0
);
722 /* env->cc_op already is the cc value */
736 gen_helper_calc_cc(cc_op
, local_cc_op
, dummy
, cc_dst
, dummy
);
741 case CC_OP_LTUGTU_32
:
742 case CC_OP_LTUGTU_64
:
749 gen_helper_calc_cc(cc_op
, local_cc_op
, cc_src
, cc_dst
, dummy
);
760 gen_helper_calc_cc(cc_op
, local_cc_op
, cc_src
, cc_dst
, cc_vr
);
763 /* unknown operation - assume 3 arguments and cc_op in env */
764 gen_helper_calc_cc(cc_op
, cc_op
, cc_src
, cc_dst
, cc_vr
);
770 tcg_temp_free_i32(local_cc_op
);
772 /* We now have cc in cc_op as constant */
776 static inline void decode_rr(DisasContext
*s
, uint64_t insn
, int *r1
, int *r2
)
780 *r1
= (insn
>> 4) & 0xf;
784 static inline TCGv_i64
decode_rx(DisasContext
*s
, uint64_t insn
, int *r1
,
785 int *x2
, int *b2
, int *d2
)
789 *r1
= (insn
>> 20) & 0xf;
790 *x2
= (insn
>> 16) & 0xf;
791 *b2
= (insn
>> 12) & 0xf;
794 return get_address(s
, *x2
, *b2
, *d2
);
797 static inline void decode_rs(DisasContext
*s
, uint64_t insn
, int *r1
, int *r3
,
802 *r1
= (insn
>> 20) & 0xf;
804 *r3
= (insn
>> 16) & 0xf;
805 *b2
= (insn
>> 12) & 0xf;
809 static inline TCGv_i64
decode_si(DisasContext
*s
, uint64_t insn
, int *i2
,
814 *i2
= (insn
>> 16) & 0xff;
815 *b1
= (insn
>> 12) & 0xf;
818 return get_address(s
, 0, *b1
, *d1
);
821 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong pc
)
823 TranslationBlock
*tb
;
828 /* NOTE: we handle the case where the TB spans two pages here */
829 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
830 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
831 /* jump to same page: we can use a direct jump */
832 tcg_gen_goto_tb(tb_num
);
833 tcg_gen_movi_i64(psw_addr
, pc
);
834 tcg_gen_exit_tb((long)tb
+ tb_num
);
836 /* jump to another page: currently not optimized */
837 tcg_gen_movi_i64(psw_addr
, pc
);
842 static inline void account_noninline_branch(DisasContext
*s
, int cc_op
)
844 #ifdef DEBUG_INLINE_BRANCHES
845 inline_branch_miss
[cc_op
]++;
849 static inline void account_inline_branch(DisasContext
*s
)
851 #ifdef DEBUG_INLINE_BRANCHES
852 inline_branch_hit
[s
->cc_op
]++;
856 static void gen_jcc(DisasContext
*s
, uint32_t mask
, int skip
)
858 TCGv_i32 tmp
, tmp2
, r
;
864 tmp
= tcg_temp_new_i32();
865 tcg_gen_trunc_i64_i32(tmp
, cc_dst
);
867 case 0x8 | 0x4: /* dst <= 0 */
868 tcg_gen_brcondi_i32(TCG_COND_GT
, tmp
, 0, skip
);
870 case 0x8 | 0x2: /* dst >= 0 */
871 tcg_gen_brcondi_i32(TCG_COND_LT
, tmp
, 0, skip
);
873 case 0x8: /* dst == 0 */
874 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
876 case 0x7: /* dst != 0 */
877 case 0x6: /* dst != 0 */
878 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
880 case 0x4: /* dst < 0 */
881 tcg_gen_brcondi_i32(TCG_COND_GE
, tmp
, 0, skip
);
883 case 0x2: /* dst > 0 */
884 tcg_gen_brcondi_i32(TCG_COND_LE
, tmp
, 0, skip
);
887 tcg_temp_free_i32(tmp
);
890 account_inline_branch(s
);
891 tcg_temp_free_i32(tmp
);
895 case 0x8 | 0x4: /* dst <= 0 */
896 tcg_gen_brcondi_i64(TCG_COND_GT
, cc_dst
, 0, skip
);
898 case 0x8 | 0x2: /* dst >= 0 */
899 tcg_gen_brcondi_i64(TCG_COND_LT
, cc_dst
, 0, skip
);
901 case 0x8: /* dst == 0 */
902 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
904 case 0x7: /* dst != 0 */
905 case 0x6: /* dst != 0 */
906 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
908 case 0x4: /* dst < 0 */
909 tcg_gen_brcondi_i64(TCG_COND_GE
, cc_dst
, 0, skip
);
911 case 0x2: /* dst > 0 */
912 tcg_gen_brcondi_i64(TCG_COND_LE
, cc_dst
, 0, skip
);
917 account_inline_branch(s
);
920 tmp
= tcg_temp_new_i32();
921 tmp2
= tcg_temp_new_i32();
922 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
923 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
925 case 0x8 | 0x4: /* src <= dst */
926 tcg_gen_brcond_i32(TCG_COND_GT
, tmp
, tmp2
, skip
);
928 case 0x8 | 0x2: /* src >= dst */
929 tcg_gen_brcond_i32(TCG_COND_LT
, tmp
, tmp2
, skip
);
931 case 0x8: /* src == dst */
932 tcg_gen_brcond_i32(TCG_COND_NE
, tmp
, tmp2
, skip
);
934 case 0x7: /* src != dst */
935 case 0x6: /* src != dst */
936 tcg_gen_brcond_i32(TCG_COND_EQ
, tmp
, tmp2
, skip
);
938 case 0x4: /* src < dst */
939 tcg_gen_brcond_i32(TCG_COND_GE
, tmp
, tmp2
, skip
);
941 case 0x2: /* src > dst */
942 tcg_gen_brcond_i32(TCG_COND_LE
, tmp
, tmp2
, skip
);
945 tcg_temp_free_i32(tmp
);
946 tcg_temp_free_i32(tmp2
);
949 account_inline_branch(s
);
950 tcg_temp_free_i32(tmp
);
951 tcg_temp_free_i32(tmp2
);
955 case 0x8 | 0x4: /* src <= dst */
956 tcg_gen_brcond_i64(TCG_COND_GT
, cc_src
, cc_dst
, skip
);
958 case 0x8 | 0x2: /* src >= dst */
959 tcg_gen_brcond_i64(TCG_COND_LT
, cc_src
, cc_dst
, skip
);
961 case 0x8: /* src == dst */
962 tcg_gen_brcond_i64(TCG_COND_NE
, cc_src
, cc_dst
, skip
);
964 case 0x7: /* src != dst */
965 case 0x6: /* src != dst */
966 tcg_gen_brcond_i64(TCG_COND_EQ
, cc_src
, cc_dst
, skip
);
968 case 0x4: /* src < dst */
969 tcg_gen_brcond_i64(TCG_COND_GE
, cc_src
, cc_dst
, skip
);
971 case 0x2: /* src > dst */
972 tcg_gen_brcond_i64(TCG_COND_LE
, cc_src
, cc_dst
, skip
);
977 account_inline_branch(s
);
979 case CC_OP_LTUGTU_32
:
980 tmp
= tcg_temp_new_i32();
981 tmp2
= tcg_temp_new_i32();
982 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
983 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
985 case 0x8 | 0x4: /* src <= dst */
986 tcg_gen_brcond_i32(TCG_COND_GTU
, tmp
, tmp2
, skip
);
988 case 0x8 | 0x2: /* src >= dst */
989 tcg_gen_brcond_i32(TCG_COND_LTU
, tmp
, tmp2
, skip
);
991 case 0x8: /* src == dst */
992 tcg_gen_brcond_i32(TCG_COND_NE
, tmp
, tmp2
, skip
);
994 case 0x7: /* src != dst */
995 case 0x6: /* src != dst */
996 tcg_gen_brcond_i32(TCG_COND_EQ
, tmp
, tmp2
, skip
);
998 case 0x4: /* src < dst */
999 tcg_gen_brcond_i32(TCG_COND_GEU
, tmp
, tmp2
, skip
);
1001 case 0x2: /* src > dst */
1002 tcg_gen_brcond_i32(TCG_COND_LEU
, tmp
, tmp2
, skip
);
1005 tcg_temp_free_i32(tmp
);
1006 tcg_temp_free_i32(tmp2
);
1009 account_inline_branch(s
);
1010 tcg_temp_free_i32(tmp
);
1011 tcg_temp_free_i32(tmp2
);
1013 case CC_OP_LTUGTU_64
:
1015 case 0x8 | 0x4: /* src <= dst */
1016 tcg_gen_brcond_i64(TCG_COND_GTU
, cc_src
, cc_dst
, skip
);
1018 case 0x8 | 0x2: /* src >= dst */
1019 tcg_gen_brcond_i64(TCG_COND_LTU
, cc_src
, cc_dst
, skip
);
1021 case 0x8: /* src == dst */
1022 tcg_gen_brcond_i64(TCG_COND_NE
, cc_src
, cc_dst
, skip
);
1024 case 0x7: /* src != dst */
1025 case 0x6: /* src != dst */
1026 tcg_gen_brcond_i64(TCG_COND_EQ
, cc_src
, cc_dst
, skip
);
1028 case 0x4: /* src < dst */
1029 tcg_gen_brcond_i64(TCG_COND_GEU
, cc_src
, cc_dst
, skip
);
1031 case 0x2: /* src > dst */
1032 tcg_gen_brcond_i64(TCG_COND_LEU
, cc_src
, cc_dst
, skip
);
1037 account_inline_branch(s
);
1041 /* dst == 0 || dst != 0 */
1043 case 0x8 | 0x4 | 0x2:
1044 case 0x8 | 0x4 | 0x2 | 0x1:
1045 case 0x8 | 0x4 | 0x1:
1050 case 0x8 | 0x2 | 0x1:
1052 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
1057 case 0x4 | 0x2 | 0x1:
1059 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
1064 account_inline_branch(s
);
1067 tmp
= tcg_temp_new_i32();
1068 tmp2
= tcg_temp_new_i32();
1070 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
1071 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
1072 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
1074 case 0x8: /* val & mask == 0 */
1075 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
1077 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1078 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
1081 tcg_temp_free_i32(tmp
);
1082 tcg_temp_free_i32(tmp2
);
1085 tcg_temp_free_i32(tmp
);
1086 tcg_temp_free_i32(tmp2
);
1087 account_inline_branch(s
);
1090 tmp64
= tcg_temp_new_i64();
1092 tcg_gen_and_i64(tmp64
, cc_src
, cc_dst
);
1094 case 0x8: /* val & mask == 0 */
1095 tcg_gen_brcondi_i64(TCG_COND_NE
, tmp64
, 0, skip
);
1097 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1098 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp64
, 0, skip
);
1101 tcg_temp_free_i64(tmp64
);
1104 tcg_temp_free_i64(tmp64
);
1105 account_inline_branch(s
);
1109 case 0x8: /* val == 0 */
1110 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
1112 case 0x4 | 0x2 | 0x1: /* val != 0 */
1113 case 0x4 | 0x2: /* val != 0 */
1114 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
1119 account_inline_branch(s
);
1122 old_cc_op
= s
->cc_op
;
1123 goto do_dynamic_nocccalc
;
1127 old_cc_op
= s
->cc_op
;
1128 /* calculate cc value */
1131 do_dynamic_nocccalc
:
1132 /* jump based on cc */
1133 account_noninline_branch(s
, old_cc_op
);
1136 case 0x8 | 0x4 | 0x2 | 0x1:
1139 case 0x8 | 0x4 | 0x2: /* cc != 3 */
1140 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 3, skip
);
1142 case 0x8 | 0x4 | 0x1: /* cc != 2 */
1143 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 2, skip
);
1145 case 0x8 | 0x2 | 0x1: /* cc != 1 */
1146 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 1, skip
);
1148 case 0x8 | 0x2: /* cc == 0 ||Â cc == 2 */
1149 tmp
= tcg_temp_new_i32();
1150 tcg_gen_andi_i32(tmp
, cc_op
, 1);
1151 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
1152 tcg_temp_free_i32(tmp
);
1154 case 0x8 | 0x4: /* cc < 2 */
1155 tcg_gen_brcondi_i32(TCG_COND_GEU
, cc_op
, 2, skip
);
1157 case 0x8: /* cc == 0 */
1158 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 0, skip
);
1160 case 0x4 | 0x2 | 0x1: /* cc != 0 */
1161 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 0, skip
);
1163 case 0x4 | 0x1: /* cc == 1 ||Â cc == 3 */
1164 tmp
= tcg_temp_new_i32();
1165 tcg_gen_andi_i32(tmp
, cc_op
, 1);
1166 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
1167 tcg_temp_free_i32(tmp
);
1169 case 0x4: /* cc == 1 */
1170 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 1, skip
);
1172 case 0x2 | 0x1: /* cc > 1 */
1173 tcg_gen_brcondi_i32(TCG_COND_LEU
, cc_op
, 1, skip
);
1175 case 0x2: /* cc == 2 */
1176 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 2, skip
);
1178 case 0x1: /* cc == 3 */
1179 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 3, skip
);
1181 default: /* cc is masked by something else */
1182 tmp
= tcg_const_i32(3);
1184 tcg_gen_sub_i32(tmp
, tmp
, cc_op
);
1185 tmp2
= tcg_const_i32(1);
1187 tcg_gen_shl_i32(tmp2
, tmp2
, tmp
);
1188 r
= tcg_const_i32(mask
);
1189 /* mask & (1 << (3 - cc)) */
1190 tcg_gen_and_i32(r
, r
, tmp2
);
1191 tcg_temp_free_i32(tmp
);
1192 tcg_temp_free_i32(tmp2
);
1194 tcg_gen_brcondi_i32(TCG_COND_EQ
, r
, 0, skip
);
1195 tcg_temp_free_i32(r
);
1202 static void gen_bcr(DisasContext
*s
, uint32_t mask
, TCGv_i64 target
,
1209 tcg_gen_mov_i64(psw_addr
, target
);
1211 } else if (mask
== 0) {
1212 /* ignore cc and never match */
1213 gen_goto_tb(s
, 0, offset
+ 2);
1215 TCGv_i64 new_addr
= tcg_temp_local_new_i64();
1217 tcg_gen_mov_i64(new_addr
, target
);
1218 skip
= gen_new_label();
1219 gen_jcc(s
, mask
, skip
);
1220 tcg_gen_mov_i64(psw_addr
, new_addr
);
1221 tcg_temp_free_i64(new_addr
);
1223 gen_set_label(skip
);
1224 tcg_temp_free_i64(new_addr
);
1225 gen_goto_tb(s
, 1, offset
+ 2);
1229 static void gen_brc(uint32_t mask
, DisasContext
*s
, int32_t offset
)
1235 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1236 } else if (mask
== 0) {
1237 /* ignore cc and never match */
1238 gen_goto_tb(s
, 0, s
->pc
+ 4);
1240 skip
= gen_new_label();
1241 gen_jcc(s
, mask
, skip
);
1242 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1243 gen_set_label(skip
);
1244 gen_goto_tb(s
, 1, s
->pc
+ 4);
1246 s
->is_jmp
= DISAS_TB_JUMP
;
1249 static void gen_op_mvc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1253 int l_memset
= gen_new_label();
1254 int l_out
= gen_new_label();
1255 TCGv_i64 dest
= tcg_temp_local_new_i64();
1256 TCGv_i64 src
= tcg_temp_local_new_i64();
1259 /* Find out if we should use the inline version of mvc */
1274 /* Fall back to helper */
1275 vl
= tcg_const_i32(l
);
1276 potential_page_fault(s
);
1277 gen_helper_mvc(vl
, s1
, s2
);
1278 tcg_temp_free_i32(vl
);
1282 tcg_gen_mov_i64(dest
, s1
);
1283 tcg_gen_mov_i64(src
, s2
);
1285 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
1286 /* XXX what if we overflow while moving? */
1287 tcg_gen_andi_i64(dest
, dest
, 0x7fffffffUL
);
1288 tcg_gen_andi_i64(src
, src
, 0x7fffffffUL
);
1291 tmp
= tcg_temp_new_i64();
1292 tcg_gen_addi_i64(tmp
, src
, 1);
1293 tcg_gen_brcond_i64(TCG_COND_EQ
, dest
, tmp
, l_memset
);
1294 tcg_temp_free_i64(tmp
);
1298 tmp
= tcg_temp_new_i64();
1300 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1301 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1303 tcg_temp_free_i64(tmp
);
1306 tmp
= tcg_temp_new_i64();
1308 tcg_gen_qemu_ld16u(tmp
, src
, get_mem_index(s
));
1309 tcg_gen_qemu_st16(tmp
, dest
, get_mem_index(s
));
1311 tcg_temp_free_i64(tmp
);
1314 tmp
= tcg_temp_new_i64();
1316 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1317 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1319 tcg_temp_free_i64(tmp
);
1322 tmp
= tcg_temp_new_i64();
1323 tmp2
= tcg_temp_new_i64();
1325 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1326 tcg_gen_addi_i64(src
, src
, 4);
1327 tcg_gen_qemu_ld8u(tmp2
, src
, get_mem_index(s
));
1328 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1329 tcg_gen_addi_i64(dest
, dest
, 4);
1330 tcg_gen_qemu_st8(tmp2
, dest
, get_mem_index(s
));
1332 tcg_temp_free_i64(tmp
);
1333 tcg_temp_free_i64(tmp2
);
1336 tmp
= tcg_temp_new_i64();
1338 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1339 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1341 tcg_temp_free_i64(tmp
);
1344 /* The inline version can become too big for too uneven numbers, only
1345 use it on known good lengths */
1346 tmp
= tcg_temp_new_i64();
1347 tmp2
= tcg_const_i64(8);
1348 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1349 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1350 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1352 tcg_gen_add_i64(src
, src
, tmp2
);
1353 tcg_gen_add_i64(dest
, dest
, tmp2
);
1356 tcg_temp_free_i64(tmp2
);
1357 tmp2
= tcg_const_i64(1);
1359 for (; i
<= l
; i
++) {
1360 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1361 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1363 tcg_gen_add_i64(src
, src
, tmp2
);
1364 tcg_gen_add_i64(dest
, dest
, tmp2
);
1367 tcg_temp_free_i64(tmp2
);
1368 tcg_temp_free_i64(tmp
);
1374 gen_set_label(l_memset
);
1375 /* memset case (dest == (src + 1)) */
1377 tmp
= tcg_temp_new_i64();
1378 tmp2
= tcg_temp_new_i64();
1379 /* fill tmp with the byte */
1380 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1381 tcg_gen_shli_i64(tmp2
, tmp
, 8);
1382 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1383 tcg_gen_shli_i64(tmp2
, tmp
, 16);
1384 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1385 tcg_gen_shli_i64(tmp2
, tmp
, 32);
1386 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1387 tcg_temp_free_i64(tmp2
);
1389 tmp2
= tcg_const_i64(8);
1391 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1392 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1393 tcg_gen_addi_i64(dest
, dest
, 8);
1396 tcg_temp_free_i64(tmp2
);
1397 tmp2
= tcg_const_i64(1);
1399 for (; i
<= l
; i
++) {
1400 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1401 tcg_gen_addi_i64(dest
, dest
, 1);
1404 tcg_temp_free_i64(tmp2
);
1405 tcg_temp_free_i64(tmp
);
1407 gen_set_label(l_out
);
1409 tcg_temp_free(dest
);
1413 static void gen_op_clc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1419 /* check for simple 32bit or 64bit match */
1422 tmp
= tcg_temp_new_i64();
1423 tmp2
= tcg_temp_new_i64();
1425 tcg_gen_qemu_ld8u(tmp
, s1
, get_mem_index(s
));
1426 tcg_gen_qemu_ld8u(tmp2
, s2
, get_mem_index(s
));
1427 cmp_u64(s
, tmp
, tmp2
);
1429 tcg_temp_free_i64(tmp
);
1430 tcg_temp_free_i64(tmp2
);
1433 tmp
= tcg_temp_new_i64();
1434 tmp2
= tcg_temp_new_i64();
1436 tcg_gen_qemu_ld16u(tmp
, s1
, get_mem_index(s
));
1437 tcg_gen_qemu_ld16u(tmp2
, s2
, get_mem_index(s
));
1438 cmp_u64(s
, tmp
, tmp2
);
1440 tcg_temp_free_i64(tmp
);
1441 tcg_temp_free_i64(tmp2
);
1444 tmp
= tcg_temp_new_i64();
1445 tmp2
= tcg_temp_new_i64();
1447 tcg_gen_qemu_ld32u(tmp
, s1
, get_mem_index(s
));
1448 tcg_gen_qemu_ld32u(tmp2
, s2
, get_mem_index(s
));
1449 cmp_u64(s
, tmp
, tmp2
);
1451 tcg_temp_free_i64(tmp
);
1452 tcg_temp_free_i64(tmp2
);
1455 tmp
= tcg_temp_new_i64();
1456 tmp2
= tcg_temp_new_i64();
1458 tcg_gen_qemu_ld64(tmp
, s1
, get_mem_index(s
));
1459 tcg_gen_qemu_ld64(tmp2
, s2
, get_mem_index(s
));
1460 cmp_u64(s
, tmp
, tmp2
);
1462 tcg_temp_free_i64(tmp
);
1463 tcg_temp_free_i64(tmp2
);
1467 potential_page_fault(s
);
1468 vl
= tcg_const_i32(l
);
1469 gen_helper_clc(cc_op
, vl
, s1
, s2
);
1470 tcg_temp_free_i32(vl
);
1474 static void disas_e3(DisasContext
* s
, int op
, int r1
, int x2
, int b2
, int d2
)
1476 TCGv_i64 addr
, tmp
, tmp2
, tmp3
, tmp4
;
1477 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
1479 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1480 op
, r1
, x2
, b2
, d2
);
1481 addr
= get_address(s
, x2
, b2
, d2
);
1483 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1484 case 0x4: /* lg r1,d2(x2,b2) */
1485 tcg_gen_qemu_ld64(regs
[r1
], addr
, get_mem_index(s
));
1487 set_cc_s64(s
, regs
[r1
]);
1490 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1491 tmp2
= tcg_temp_new_i64();
1492 tmp32_1
= tcg_temp_new_i32();
1493 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1494 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1495 store_reg32(r1
, tmp32_1
);
1496 set_cc_s32(s
, tmp32_1
);
1497 tcg_temp_free_i64(tmp2
);
1498 tcg_temp_free_i32(tmp32_1
);
1500 case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
1501 case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
1502 tmp2
= tcg_temp_new_i64();
1504 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1506 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1508 tcg_gen_mul_i64(regs
[r1
], regs
[r1
], tmp2
);
1509 tcg_temp_free_i64(tmp2
);
1511 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1512 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1513 tmp2
= tcg_temp_new_i64();
1515 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1517 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1519 tmp4
= load_reg(r1
+ 1);
1520 tmp3
= tcg_temp_new_i64();
1521 tcg_gen_div_i64(tmp3
, tmp4
, tmp2
);
1522 store_reg(r1
+ 1, tmp3
);
1523 tcg_gen_rem_i64(tmp3
, tmp4
, tmp2
);
1524 store_reg(r1
, tmp3
);
1525 tcg_temp_free_i64(tmp2
);
1526 tcg_temp_free_i64(tmp3
);
1527 tcg_temp_free_i64(tmp4
);
1529 case 0x8: /* AG R1,D2(X2,B2) [RXY] */
1530 case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
1531 case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
1532 case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
1534 tmp2
= tcg_temp_new_i64();
1535 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1536 } else if (op
== 0x18) {
1537 tmp2
= tcg_temp_new_i64();
1538 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1540 tmp2
= tcg_temp_new_i64();
1541 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1543 tmp4
= load_reg(r1
);
1544 tmp3
= tcg_temp_new_i64();
1545 tcg_gen_add_i64(tmp3
, tmp4
, tmp2
);
1546 store_reg(r1
, tmp3
);
1550 set_cc_add64(s
, tmp4
, tmp2
, tmp3
);
1554 set_cc_addu64(s
, tmp4
, tmp2
, tmp3
);
1559 tcg_temp_free_i64(tmp2
);
1560 tcg_temp_free_i64(tmp3
);
1561 tcg_temp_free_i64(tmp4
);
1563 case 0x9: /* SG R1,D2(X2,B2) [RXY] */
1564 case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
1565 case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
1566 case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
1567 tmp2
= tcg_temp_new_i64();
1569 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1570 } else if (op
== 0x1b) {
1571 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1573 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1575 tmp4
= load_reg(r1
);
1576 tmp3
= tcg_temp_new_i64();
1577 tcg_gen_sub_i64(tmp3
, tmp4
, tmp2
);
1578 store_reg(r1
, tmp3
);
1582 set_cc_sub64(s
, tmp4
, tmp2
, tmp3
);
1586 set_cc_subu64(s
, tmp4
, tmp2
, tmp3
);
1591 tcg_temp_free_i64(tmp2
);
1592 tcg_temp_free_i64(tmp3
);
1593 tcg_temp_free_i64(tmp4
);
1595 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1596 tmp2
= tcg_temp_new_i64();
1597 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1598 tcg_gen_bswap64_i64(tmp2
, tmp2
);
1599 store_reg(r1
, tmp2
);
1600 tcg_temp_free_i64(tmp2
);
1602 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1603 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1604 tmp2
= tcg_temp_new_i64();
1605 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1607 tcg_gen_ext32s_i64(tmp2
, tmp2
);
1609 store_reg(r1
, tmp2
);
1610 tcg_temp_free_i64(tmp2
);
1612 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1613 tmp2
= tcg_temp_new_i64();
1614 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1615 store_reg(r1
, tmp2
);
1616 tcg_temp_free_i64(tmp2
);
1618 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1619 tmp2
= tcg_temp_new_i64();
1620 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1621 tcg_gen_andi_i64(tmp2
, tmp2
, 0x7fffffffULL
);
1622 store_reg(r1
, tmp2
);
1623 tcg_temp_free_i64(tmp2
);
1625 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1626 tmp2
= tcg_temp_new_i64();
1627 tmp32_1
= tcg_temp_new_i32();
1628 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1629 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1630 tcg_temp_free_i64(tmp2
);
1631 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1632 store_reg32(r1
, tmp32_1
);
1633 tcg_temp_free_i32(tmp32_1
);
1635 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1636 tmp2
= tcg_temp_new_i64();
1637 tmp32_1
= tcg_temp_new_i32();
1638 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1639 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1640 tcg_temp_free_i64(tmp2
);
1641 tcg_gen_bswap16_i32(tmp32_1
, tmp32_1
);
1642 store_reg16(r1
, tmp32_1
);
1643 tcg_temp_free_i32(tmp32_1
);
1645 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1646 case 0x21: /* CLG R1,D2(X2,B2) */
1647 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1648 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1649 tmp2
= tcg_temp_new_i64();
1653 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1656 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1659 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1667 cmp_s64(s
, regs
[r1
], tmp2
);
1671 cmp_u64(s
, regs
[r1
], tmp2
);
1676 tcg_temp_free_i64(tmp2
);
1678 case 0x24: /* stg r1, d2(x2,b2) */
1679 tcg_gen_qemu_st64(regs
[r1
], addr
, get_mem_index(s
));
1681 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1682 tmp32_1
= load_reg32(r1
);
1683 tmp2
= tcg_temp_new_i64();
1684 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1685 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1686 tcg_temp_free_i32(tmp32_1
);
1687 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1688 tcg_temp_free_i64(tmp2
);
1690 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1691 tmp32_1
= load_reg32(r1
);
1692 tmp2
= tcg_temp_new_i64();
1693 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1694 tcg_temp_free_i32(tmp32_1
);
1695 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1696 tcg_temp_free_i64(tmp2
);
1698 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1699 tmp32_1
= load_reg32(r1
);
1700 tmp32_2
= tcg_temp_new_i32();
1701 tmp2
= tcg_temp_new_i64();
1702 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1703 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1704 tcg_temp_free_i64(tmp2
);
1705 tcg_gen_xor_i32(tmp32_2
, tmp32_1
, tmp32_2
);
1706 store_reg32(r1
, tmp32_2
);
1707 set_cc_nz_u32(s
, tmp32_2
);
1708 tcg_temp_free_i32(tmp32_1
);
1709 tcg_temp_free_i32(tmp32_2
);
1711 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1712 tmp3
= tcg_temp_new_i64();
1713 tcg_gen_qemu_ld32u(tmp3
, addr
, get_mem_index(s
));
1714 store_reg32_i64(r1
, tmp3
);
1715 tcg_temp_free_i64(tmp3
);
1717 case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
1718 case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
1719 tmp32_1
= load_reg32(r1
);
1720 tmp32_2
= tcg_temp_new_i32();
1721 tmp32_3
= tcg_temp_new_i32();
1722 tmp2
= tcg_temp_new_i64();
1723 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1724 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1725 tcg_temp_free_i64(tmp2
);
1728 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
1731 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
1736 store_reg32(r1
, tmp32_3
);
1739 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1742 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1747 tcg_temp_free_i32(tmp32_1
);
1748 tcg_temp_free_i32(tmp32_2
);
1749 tcg_temp_free_i32(tmp32_3
);
1751 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1752 store_reg(r1
, addr
);
1754 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1755 tmp32_1
= load_reg32(r1
);
1756 tmp2
= tcg_temp_new_i64();
1757 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
1758 tcg_gen_qemu_st8(tmp2
, addr
, get_mem_index(s
));
1759 tcg_temp_free_i32(tmp32_1
);
1760 tcg_temp_free_i64(tmp2
);
1762 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1763 tmp3
= tcg_temp_new_i64();
1764 tcg_gen_qemu_ld8u(tmp3
, addr
, get_mem_index(s
));
1765 store_reg8(r1
, tmp3
);
1766 tcg_temp_free_i64(tmp3
);
1768 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1769 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1770 tmp2
= tcg_temp_new_i64();
1771 tcg_gen_qemu_ld8s(tmp2
, addr
, get_mem_index(s
));
1774 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1775 store_reg32_i64(r1
, tmp2
);
1778 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1779 store_reg(r1
, tmp2
);
1784 tcg_temp_free_i64(tmp2
);
1786 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1787 tmp2
= tcg_temp_new_i64();
1788 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1789 store_reg32_i64(r1
, tmp2
);
1790 tcg_temp_free_i64(tmp2
);
1792 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1793 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1794 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1795 tmp3
= tcg_temp_new_i64();
1796 tcg_gen_qemu_ld64(tmp3
, addr
, get_mem_index(s
));
1799 tcg_gen_and_i64(regs
[r1
], regs
[r1
], tmp3
);
1802 tcg_gen_or_i64(regs
[r1
], regs
[r1
], tmp3
);
1805 tcg_gen_xor_i64(regs
[r1
], regs
[r1
], tmp3
);
1810 set_cc_nz_u64(s
, regs
[r1
]);
1811 tcg_temp_free_i64(tmp3
);
1813 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1814 tmp2
= tcg_temp_new_i64();
1815 tmp32_1
= tcg_const_i32(r1
);
1816 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1817 gen_helper_mlg(tmp32_1
, tmp2
);
1818 tcg_temp_free_i64(tmp2
);
1819 tcg_temp_free_i32(tmp32_1
);
1821 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1822 tmp2
= tcg_temp_new_i64();
1823 tmp32_1
= tcg_const_i32(r1
);
1824 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1825 gen_helper_dlg(tmp32_1
, tmp2
);
1826 tcg_temp_free_i64(tmp2
);
1827 tcg_temp_free_i32(tmp32_1
);
1829 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1830 tmp2
= tcg_temp_new_i64();
1831 tmp3
= tcg_temp_new_i64();
1832 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1833 /* XXX possible optimization point */
1835 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
1836 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
1837 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
1838 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
1839 tcg_gen_add_i64(tmp3
, regs
[r1
], tmp3
);
1840 store_reg(r1
, tmp3
);
1841 set_cc_addu64(s
, regs
[r1
], tmp2
, tmp3
);
1842 tcg_temp_free_i64(tmp2
);
1843 tcg_temp_free_i64(tmp3
);
1845 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1846 tmp2
= tcg_temp_new_i64();
1847 tmp32_1
= tcg_const_i32(r1
);
1848 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1849 /* XXX possible optimization point */
1851 gen_helper_slbg(cc_op
, cc_op
, tmp32_1
, regs
[r1
], tmp2
);
1853 tcg_temp_free_i64(tmp2
);
1854 tcg_temp_free_i32(tmp32_1
);
1856 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1857 tcg_gen_qemu_ld8u(regs
[r1
], addr
, get_mem_index(s
));
1859 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1860 tcg_gen_qemu_ld16u(regs
[r1
], addr
, get_mem_index(s
));
1862 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1863 tmp2
= tcg_temp_new_i64();
1864 tcg_gen_qemu_ld8u(tmp2
, addr
, get_mem_index(s
));
1865 store_reg32_i64(r1
, tmp2
);
1866 tcg_temp_free_i64(tmp2
);
1868 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1869 tmp2
= tcg_temp_new_i64();
1870 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1871 store_reg32_i64(r1
, tmp2
);
1872 tcg_temp_free_i64(tmp2
);
1874 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1875 tmp2
= tcg_temp_new_i64();
1876 tmp3
= load_reg((r1
+ 1) & 15);
1877 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1878 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1879 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
1880 store_reg32_i64((r1
+ 1) & 15, tmp2
);
1881 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
1882 store_reg32_i64(r1
, tmp2
);
1883 tcg_temp_free_i64(tmp2
);
1884 tcg_temp_free_i64(tmp3
);
1886 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1887 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1888 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1890 tmp2
= tcg_temp_new_i64();
1891 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1892 tmp3
= load_reg((r1
+ 1) & 15);
1893 tcg_gen_ext32u_i64(tmp2
, tmp2
);
1894 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1895 tcg_gen_shli_i64(tmp
, tmp
, 32);
1896 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
1898 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
1899 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
1900 store_reg32_i64((r1
+ 1) & 15, tmp
);
1901 store_reg32_i64(r1
, tmp3
);
1902 tcg_temp_free_i64(tmp
);
1903 tcg_temp_free_i64(tmp2
);
1904 tcg_temp_free_i64(tmp3
);
1906 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1907 tmp2
= tcg_temp_new_i64();
1908 tmp32_1
= load_reg32(r1
);
1909 tmp32_2
= tcg_temp_new_i32();
1910 tmp32_3
= tcg_temp_new_i32();
1911 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1912 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1913 /* XXX possible optimization point */
1915 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
1916 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1917 store_reg32(r1
, tmp32_3
);
1918 tcg_temp_free_i64(tmp2
);
1919 tcg_temp_free_i32(tmp32_1
);
1920 tcg_temp_free_i32(tmp32_2
);
1921 tcg_temp_free_i32(tmp32_3
);
1923 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1924 tmp2
= tcg_temp_new_i64();
1925 tmp32_1
= tcg_const_i32(r1
);
1926 tmp32_2
= tcg_temp_new_i32();
1927 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1928 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1929 /* XXX possible optimization point */
1931 gen_helper_slb(cc_op
, cc_op
, tmp32_1
, tmp32_2
);
1933 tcg_temp_free_i64(tmp2
);
1934 tcg_temp_free_i32(tmp32_1
);
1935 tcg_temp_free_i32(tmp32_2
);
1938 LOG_DISAS("illegal e3 operation 0x%x\n", op
);
1939 gen_illegal_opcode(s
, 3);
1942 tcg_temp_free_i64(addr
);
1945 #ifndef CONFIG_USER_ONLY
1946 static void disas_e5(DisasContext
* s
, uint64_t insn
)
1949 int op
= (insn
>> 32) & 0xff;
1951 tmp
= get_address(s
, 0, (insn
>> 28) & 0xf, (insn
>> 16) & 0xfff);
1952 tmp2
= get_address(s
, 0, (insn
>> 12) & 0xf, insn
& 0xfff);
1954 LOG_DISAS("disas_e5: insn %" PRIx64
"\n", insn
);
1956 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1957 /* Test Protection */
1958 potential_page_fault(s
);
1959 gen_helper_tprot(cc_op
, tmp
, tmp2
);
1963 LOG_DISAS("illegal e5 operation 0x%x\n", op
);
1964 gen_illegal_opcode(s
, 3);
1968 tcg_temp_free_i64(tmp
);
1969 tcg_temp_free_i64(tmp2
);
1973 static void disas_eb(DisasContext
*s
, int op
, int r1
, int r3
, int b2
, int d2
)
1975 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
1976 TCGv_i32 tmp32_1
, tmp32_2
;
1980 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1981 op
, r1
, r3
, b2
, d2
);
1983 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1984 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1985 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1986 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1987 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1989 tmp
= get_address(s
, 0, b2
, d2
);
1990 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1992 tmp
= tcg_const_i64(d2
& 0x3f);
1996 tcg_gen_shr_i64(regs
[r1
], regs
[r3
], tmp
);
1999 tcg_gen_shl_i64(regs
[r1
], regs
[r3
], tmp
);
2002 tcg_gen_sar_i64(regs
[r1
], regs
[r3
], tmp
);
2005 tmp2
= tcg_temp_new_i64();
2006 tmp3
= tcg_temp_new_i64();
2007 gen_op_update2_cc_i64(s
, CC_OP_SLAG
, regs
[r3
], tmp
);
2008 tcg_gen_shl_i64(tmp2
, regs
[r3
], tmp
);
2009 /* override sign bit with source sign */
2010 tcg_gen_andi_i64(tmp2
, tmp2
, ~0x8000000000000000ULL
);
2011 tcg_gen_andi_i64(tmp3
, regs
[r3
], 0x8000000000000000ULL
);
2012 tcg_gen_or_i64(regs
[r1
], tmp2
, tmp3
);
2013 tcg_temp_free_i64(tmp2
);
2014 tcg_temp_free_i64(tmp3
);
2017 tcg_gen_rotl_i64(regs
[r1
], regs
[r3
], tmp
);
2024 set_cc_s64(s
, regs
[r1
]);
2026 tcg_temp_free_i64(tmp
);
2028 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
2030 tmp
= get_address(s
, 0, b2
, d2
);
2031 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
2033 tmp
= tcg_const_i64(d2
& 0x3f);
2035 tmp32_1
= tcg_temp_new_i32();
2036 tmp32_2
= load_reg32(r3
);
2037 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
2040 tcg_gen_rotl_i32(tmp32_1
, tmp32_2
, tmp32_1
);
2046 store_reg32(r1
, tmp32_1
);
2047 tcg_temp_free_i64(tmp
);
2048 tcg_temp_free_i32(tmp32_1
);
2049 tcg_temp_free_i32(tmp32_2
);
2051 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
2052 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
2055 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
2056 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
2059 /* Apparently, unrolling lmg/stmg of any size gains performance -
2060 even for very long ones... */
2061 tmp
= get_address(s
, 0, b2
, d2
);
2062 tmp3
= tcg_const_i64(stm_len
);
2063 tmp4
= tcg_const_i64(op
== 0x26 ? 32 : 4);
2064 for (i
= r1
;; i
= (i
+ 1) % 16) {
2067 tcg_gen_qemu_ld64(regs
[i
], tmp
, get_mem_index(s
));
2070 tmp2
= tcg_temp_new_i64();
2071 #if HOST_LONG_BITS == 32
2072 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2073 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs
[i
]), tmp2
);
2075 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2076 tcg_gen_shl_i64(tmp2
, tmp2
, tmp4
);
2077 tcg_gen_ext32u_i64(regs
[i
], regs
[i
]);
2078 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
2080 tcg_temp_free_i64(tmp2
);
2083 tcg_gen_qemu_st64(regs
[i
], tmp
, get_mem_index(s
));
2086 tmp2
= tcg_temp_new_i64();
2087 tcg_gen_shr_i64(tmp2
, regs
[i
], tmp4
);
2088 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2089 tcg_temp_free_i64(tmp2
);
2097 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
2099 tcg_temp_free_i64(tmp
);
2100 tcg_temp_free_i64(tmp3
);
2101 tcg_temp_free_i64(tmp4
);
2103 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
2104 tmp
= get_address(s
, 0, b2
, d2
);
2105 tmp32_1
= tcg_const_i32(r1
);
2106 tmp32_2
= tcg_const_i32(r3
);
2107 potential_page_fault(s
);
2108 gen_helper_stcmh(tmp32_1
, tmp
, tmp32_2
);
2109 tcg_temp_free_i64(tmp
);
2110 tcg_temp_free_i32(tmp32_1
);
2111 tcg_temp_free_i32(tmp32_2
);
2113 #ifndef CONFIG_USER_ONLY
2114 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
2116 check_privileged(s
, ilc
);
2117 tmp
= get_address(s
, 0, b2
, d2
);
2118 tmp32_1
= tcg_const_i32(r1
);
2119 tmp32_2
= tcg_const_i32(r3
);
2120 potential_page_fault(s
);
2121 gen_helper_lctlg(tmp32_1
, tmp
, tmp32_2
);
2122 tcg_temp_free_i64(tmp
);
2123 tcg_temp_free_i32(tmp32_1
);
2124 tcg_temp_free_i32(tmp32_2
);
2126 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
2128 check_privileged(s
, ilc
);
2129 tmp
= get_address(s
, 0, b2
, d2
);
2130 tmp32_1
= tcg_const_i32(r1
);
2131 tmp32_2
= tcg_const_i32(r3
);
2132 potential_page_fault(s
);
2133 gen_helper_stctg(tmp32_1
, tmp
, tmp32_2
);
2134 tcg_temp_free_i64(tmp
);
2135 tcg_temp_free_i32(tmp32_1
);
2136 tcg_temp_free_i32(tmp32_2
);
2139 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
2140 tmp
= get_address(s
, 0, b2
, d2
);
2141 tmp32_1
= tcg_const_i32(r1
);
2142 tmp32_2
= tcg_const_i32(r3
);
2143 potential_page_fault(s
);
2144 /* XXX rewrite in tcg */
2145 gen_helper_csg(cc_op
, tmp32_1
, tmp
, tmp32_2
);
2147 tcg_temp_free_i64(tmp
);
2148 tcg_temp_free_i32(tmp32_1
);
2149 tcg_temp_free_i32(tmp32_2
);
2151 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
2152 tmp
= get_address(s
, 0, b2
, d2
);
2153 tmp32_1
= tcg_const_i32(r1
);
2154 tmp32_2
= tcg_const_i32(r3
);
2155 potential_page_fault(s
);
2156 /* XXX rewrite in tcg */
2157 gen_helper_cdsg(cc_op
, tmp32_1
, tmp
, tmp32_2
);
2159 tcg_temp_free_i64(tmp
);
2160 tcg_temp_free_i32(tmp32_1
);
2161 tcg_temp_free_i32(tmp32_2
);
2163 case 0x51: /* TMY D1(B1),I2 [SIY] */
2164 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
2165 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
2166 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
2167 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
2168 that incurs less conversions */
2169 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
2170 tcg_temp_free_i64(tmp
);
2171 tcg_temp_free_i64(tmp2
);
2173 case 0x52: /* MVIY D1(B1),I2 [SIY] */
2174 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
2175 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
2176 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
2177 tcg_temp_free_i64(tmp
);
2178 tcg_temp_free_i64(tmp2
);
2180 case 0x55: /* CLIY D1(B1),I2 [SIY] */
2181 tmp3
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the 1st operand */
2182 tmp
= tcg_temp_new_i64();
2183 tmp32_1
= tcg_temp_new_i32();
2184 tcg_gen_qemu_ld8u(tmp
, tmp3
, get_mem_index(s
));
2185 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
2186 cmp_u32c(s
, tmp32_1
, (r1
<< 4) | r3
);
2187 tcg_temp_free_i64(tmp
);
2188 tcg_temp_free_i64(tmp3
);
2189 tcg_temp_free_i32(tmp32_1
);
2191 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
2192 tmp
= get_address(s
, 0, b2
, d2
);
2193 tmp32_1
= tcg_const_i32(r1
);
2194 tmp32_2
= tcg_const_i32(r3
);
2195 potential_page_fault(s
);
2196 /* XXX split CC calculation out */
2197 gen_helper_icmh(cc_op
, tmp32_1
, tmp
, tmp32_2
);
2199 tcg_temp_free_i64(tmp
);
2200 tcg_temp_free_i32(tmp32_1
);
2201 tcg_temp_free_i32(tmp32_2
);
2204 LOG_DISAS("illegal eb operation 0x%x\n", op
);
2205 gen_illegal_opcode(s
, ilc
);
2210 static void disas_ed(DisasContext
*s
, int op
, int r1
, int x2
, int b2
, int d2
,
2213 TCGv_i32 tmp_r1
, tmp32
;
2215 addr
= get_address(s
, x2
, b2
, d2
);
2216 tmp_r1
= tcg_const_i32(r1
);
2218 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2219 potential_page_fault(s
);
2220 gen_helper_lxdb(tmp_r1
, addr
);
2222 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2223 tmp
= tcg_temp_new_i64();
2224 tmp32
= load_freg32(r1
);
2225 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2226 set_cc_cmp_f32_i64(s
, tmp32
, tmp
);
2227 tcg_temp_free_i64(tmp
);
2228 tcg_temp_free_i32(tmp32
);
2230 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2231 tmp
= tcg_temp_new_i64();
2232 tmp32
= tcg_temp_new_i32();
2233 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2234 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2235 gen_helper_aeb(tmp_r1
, tmp32
);
2236 tcg_temp_free_i64(tmp
);
2237 tcg_temp_free_i32(tmp32
);
2239 tmp32
= load_freg32(r1
);
2240 set_cc_nz_f32(s
, tmp32
);
2241 tcg_temp_free_i32(tmp32
);
2243 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2244 tmp
= tcg_temp_new_i64();
2245 tmp32
= tcg_temp_new_i32();
2246 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2247 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2248 gen_helper_seb(tmp_r1
, tmp32
);
2249 tcg_temp_free_i64(tmp
);
2250 tcg_temp_free_i32(tmp32
);
2252 tmp32
= load_freg32(r1
);
2253 set_cc_nz_f32(s
, tmp32
);
2254 tcg_temp_free_i32(tmp32
);
2256 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2257 tmp
= tcg_temp_new_i64();
2258 tmp32
= tcg_temp_new_i32();
2259 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2260 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2261 gen_helper_deb(tmp_r1
, tmp32
);
2262 tcg_temp_free_i64(tmp
);
2263 tcg_temp_free_i32(tmp32
);
2265 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2266 potential_page_fault(s
);
2267 gen_helper_tceb(cc_op
, tmp_r1
, addr
);
2270 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2271 potential_page_fault(s
);
2272 gen_helper_tcdb(cc_op
, tmp_r1
, addr
);
2275 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2276 potential_page_fault(s
);
2277 gen_helper_tcxb(cc_op
, tmp_r1
, addr
);
2280 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2281 tmp
= tcg_temp_new_i64();
2282 tmp32
= tcg_temp_new_i32();
2283 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2284 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2285 gen_helper_meeb(tmp_r1
, tmp32
);
2286 tcg_temp_free_i64(tmp
);
2287 tcg_temp_free_i32(tmp32
);
2289 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2290 potential_page_fault(s
);
2291 gen_helper_cdb(cc_op
, tmp_r1
, addr
);
2294 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2295 potential_page_fault(s
);
2296 gen_helper_adb(cc_op
, tmp_r1
, addr
);
2299 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2300 potential_page_fault(s
);
2301 gen_helper_sdb(cc_op
, tmp_r1
, addr
);
2304 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2305 potential_page_fault(s
);
2306 gen_helper_mdb(tmp_r1
, addr
);
2308 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2309 potential_page_fault(s
);
2310 gen_helper_ddb(tmp_r1
, addr
);
2312 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2313 /* for RXF insns, r1 is R3 and r1b is R1 */
2314 tmp32
= tcg_const_i32(r1b
);
2315 potential_page_fault(s
);
2316 gen_helper_madb(tmp32
, addr
, tmp_r1
);
2317 tcg_temp_free_i32(tmp32
);
2320 LOG_DISAS("illegal ed operation 0x%x\n", op
);
2321 gen_illegal_opcode(s
, 3);
2324 tcg_temp_free_i32(tmp_r1
);
2325 tcg_temp_free_i64(addr
);
2328 static void disas_a5(DisasContext
*s
, int op
, int r1
, int i2
)
2332 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2334 case 0x0: /* IIHH R1,I2 [RI] */
2335 tmp
= tcg_const_i64(i2
);
2336 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 48, 16);
2337 tcg_temp_free_i64(tmp
);
2339 case 0x1: /* IIHL R1,I2 [RI] */
2340 tmp
= tcg_const_i64(i2
);
2341 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 32, 16);
2342 tcg_temp_free_i64(tmp
);
2344 case 0x2: /* IILH R1,I2 [RI] */
2345 tmp
= tcg_const_i64(i2
);
2346 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 16, 16);
2347 tcg_temp_free_i64(tmp
);
2349 case 0x3: /* IILL R1,I2 [RI] */
2350 tmp
= tcg_const_i64(i2
);
2351 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 0, 16);
2352 tcg_temp_free_i64(tmp
);
2354 case 0x4: /* NIHH R1,I2 [RI] */
2355 case 0x8: /* OIHH R1,I2 [RI] */
2357 tmp32
= tcg_temp_new_i32();
2360 tmp2
= tcg_const_i64((((uint64_t)i2
) << 48)
2361 | 0x0000ffffffffffffULL
);
2362 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2365 tmp2
= tcg_const_i64(((uint64_t)i2
) << 48);
2366 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2372 tcg_gen_shri_i64(tmp2
, tmp
, 48);
2373 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2374 set_cc_nz_u32(s
, tmp32
);
2375 tcg_temp_free_i64(tmp2
);
2376 tcg_temp_free_i32(tmp32
);
2377 tcg_temp_free_i64(tmp
);
2379 case 0x5: /* NIHL R1,I2 [RI] */
2380 case 0x9: /* OIHL R1,I2 [RI] */
2382 tmp32
= tcg_temp_new_i32();
2385 tmp2
= tcg_const_i64((((uint64_t)i2
) << 32)
2386 | 0xffff0000ffffffffULL
);
2387 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2390 tmp2
= tcg_const_i64(((uint64_t)i2
) << 32);
2391 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2397 tcg_gen_shri_i64(tmp2
, tmp
, 32);
2398 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2399 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2400 set_cc_nz_u32(s
, tmp32
);
2401 tcg_temp_free_i64(tmp2
);
2402 tcg_temp_free_i32(tmp32
);
2403 tcg_temp_free_i64(tmp
);
2405 case 0x6: /* NILH R1,I2 [RI] */
2406 case 0xa: /* OILH R1,I2 [RI] */
2408 tmp32
= tcg_temp_new_i32();
2411 tmp2
= tcg_const_i64((((uint64_t)i2
) << 16)
2412 | 0xffffffff0000ffffULL
);
2413 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2416 tmp2
= tcg_const_i64(((uint64_t)i2
) << 16);
2417 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2423 tcg_gen_shri_i64(tmp
, tmp
, 16);
2424 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2425 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2426 set_cc_nz_u32(s
, tmp32
);
2427 tcg_temp_free_i64(tmp2
);
2428 tcg_temp_free_i32(tmp32
);
2429 tcg_temp_free_i64(tmp
);
2431 case 0x7: /* NILL R1,I2 [RI] */
2432 case 0xb: /* OILL R1,I2 [RI] */
2434 tmp32
= tcg_temp_new_i32();
2437 tmp2
= tcg_const_i64(i2
| 0xffffffffffff0000ULL
);
2438 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2441 tmp2
= tcg_const_i64(i2
);
2442 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2448 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2449 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2450 set_cc_nz_u32(s
, tmp32
); /* signedness should not matter here */
2451 tcg_temp_free_i64(tmp2
);
2452 tcg_temp_free_i32(tmp32
);
2453 tcg_temp_free_i64(tmp
);
2455 case 0xc: /* LLIHH R1,I2 [RI] */
2456 tmp
= tcg_const_i64( ((uint64_t)i2
) << 48 );
2458 tcg_temp_free_i64(tmp
);
2460 case 0xd: /* LLIHL R1,I2 [RI] */
2461 tmp
= tcg_const_i64( ((uint64_t)i2
) << 32 );
2463 tcg_temp_free_i64(tmp
);
2465 case 0xe: /* LLILH R1,I2 [RI] */
2466 tmp
= tcg_const_i64( ((uint64_t)i2
) << 16 );
2468 tcg_temp_free_i64(tmp
);
2470 case 0xf: /* LLILL R1,I2 [RI] */
2471 tmp
= tcg_const_i64(i2
);
2473 tcg_temp_free_i64(tmp
);
2476 LOG_DISAS("illegal a5 operation 0x%x\n", op
);
2477 gen_illegal_opcode(s
, 2);
2482 static void disas_a7(DisasContext
*s
, int op
, int r1
, int i2
)
2485 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2488 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2490 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2491 case 0x1: /* TMLL or TML R1,I2 [RI] */
2492 case 0x2: /* TMHH R1,I2 [RI] */
2493 case 0x3: /* TMHL R1,I2 [RI] */
2495 tmp2
= tcg_const_i64((uint16_t)i2
);
2498 tcg_gen_shri_i64(tmp
, tmp
, 16);
2503 tcg_gen_shri_i64(tmp
, tmp
, 48);
2506 tcg_gen_shri_i64(tmp
, tmp
, 32);
2509 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
2510 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_64
);
2511 tcg_temp_free_i64(tmp
);
2512 tcg_temp_free_i64(tmp2
);
2514 case 0x4: /* brc m1, i2 */
2515 gen_brc(r1
, s
, i2
* 2LL);
2517 case 0x5: /* BRAS R1,I2 [RI] */
2518 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
2520 tcg_temp_free_i64(tmp
);
2521 gen_goto_tb(s
, 0, s
->pc
+ i2
* 2LL);
2522 s
->is_jmp
= DISAS_TB_JUMP
;
2524 case 0x6: /* BRCT R1,I2 [RI] */
2525 tmp32_1
= load_reg32(r1
);
2526 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
2527 store_reg32(r1
, tmp32_1
);
2528 gen_update_cc_op(s
);
2529 l1
= gen_new_label();
2530 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
2531 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2533 gen_goto_tb(s
, 1, s
->pc
+ 4);
2534 s
->is_jmp
= DISAS_TB_JUMP
;
2535 tcg_temp_free_i32(tmp32_1
);
2537 case 0x7: /* BRCTG R1,I2 [RI] */
2539 tcg_gen_subi_i64(tmp
, tmp
, 1);
2541 gen_update_cc_op(s
);
2542 l1
= gen_new_label();
2543 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp
, 0, l1
);
2544 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2546 gen_goto_tb(s
, 1, s
->pc
+ 4);
2547 s
->is_jmp
= DISAS_TB_JUMP
;
2548 tcg_temp_free_i64(tmp
);
2550 case 0x8: /* lhi r1, i2 */
2551 tmp32_1
= tcg_const_i32(i2
);
2552 store_reg32(r1
, tmp32_1
);
2553 tcg_temp_free_i32(tmp32_1
);
2555 case 0x9: /* lghi r1, i2 */
2556 tmp
= tcg_const_i64(i2
);
2558 tcg_temp_free_i64(tmp
);
2560 case 0xa: /* AHI R1,I2 [RI] */
2561 tmp32_1
= load_reg32(r1
);
2562 tmp32_2
= tcg_temp_new_i32();
2563 tmp32_3
= tcg_const_i32(i2
);
2566 tcg_gen_subi_i32(tmp32_2
, tmp32_1
, -i2
);
2568 tcg_gen_add_i32(tmp32_2
, tmp32_1
, tmp32_3
);
2571 store_reg32(r1
, tmp32_2
);
2572 set_cc_add32(s
, tmp32_1
, tmp32_3
, tmp32_2
);
2573 tcg_temp_free_i32(tmp32_1
);
2574 tcg_temp_free_i32(tmp32_2
);
2575 tcg_temp_free_i32(tmp32_3
);
2577 case 0xb: /* aghi r1, i2 */
2579 tmp2
= tcg_const_i64(i2
);
2582 tcg_gen_subi_i64(regs
[r1
], tmp
, -i2
);
2584 tcg_gen_add_i64(regs
[r1
], tmp
, tmp2
);
2586 set_cc_add64(s
, tmp
, tmp2
, regs
[r1
]);
2587 tcg_temp_free_i64(tmp
);
2588 tcg_temp_free_i64(tmp2
);
2590 case 0xc: /* MHI R1,I2 [RI] */
2591 tmp32_1
= load_reg32(r1
);
2592 tcg_gen_muli_i32(tmp32_1
, tmp32_1
, i2
);
2593 store_reg32(r1
, tmp32_1
);
2594 tcg_temp_free_i32(tmp32_1
);
2596 case 0xd: /* MGHI R1,I2 [RI] */
2598 tcg_gen_muli_i64(tmp
, tmp
, i2
);
2600 tcg_temp_free_i64(tmp
);
2602 case 0xe: /* CHI R1,I2 [RI] */
2603 tmp32_1
= load_reg32(r1
);
2604 cmp_s32c(s
, tmp32_1
, i2
);
2605 tcg_temp_free_i32(tmp32_1
);
2607 case 0xf: /* CGHI R1,I2 [RI] */
2609 cmp_s64c(s
, tmp
, i2
);
2610 tcg_temp_free_i64(tmp
);
2613 LOG_DISAS("illegal a7 operation 0x%x\n", op
);
2614 gen_illegal_opcode(s
, 2);
2619 static void disas_b2(DisasContext
*s
, int op
, uint32_t insn
)
2621 TCGv_i64 tmp
, tmp2
, tmp3
;
2622 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2625 #ifndef CONFIG_USER_ONLY
2629 r1
= (insn
>> 4) & 0xf;
2632 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
2635 case 0x22: /* IPM R1 [RRE] */
2636 tmp32_1
= tcg_const_i32(r1
);
2638 gen_helper_ipm(cc_op
, tmp32_1
);
2639 tcg_temp_free_i32(tmp32_1
);
2641 case 0x41: /* CKSM R1,R2 [RRE] */
2642 tmp32_1
= tcg_const_i32(r1
);
2643 tmp32_2
= tcg_const_i32(r2
);
2644 potential_page_fault(s
);
2645 gen_helper_cksm(tmp32_1
, tmp32_2
);
2646 tcg_temp_free_i32(tmp32_1
);
2647 tcg_temp_free_i32(tmp32_2
);
2648 gen_op_movi_cc(s
, 0);
2650 case 0x4e: /* SAR R1,R2 [RRE] */
2651 tmp32_1
= load_reg32(r2
);
2652 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, aregs
[r1
]));
2653 tcg_temp_free_i32(tmp32_1
);
2655 case 0x4f: /* EAR R1,R2 [RRE] */
2656 tmp32_1
= tcg_temp_new_i32();
2657 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, aregs
[r2
]));
2658 store_reg32(r1
, tmp32_1
);
2659 tcg_temp_free_i32(tmp32_1
);
2661 case 0x52: /* MSR R1,R2 [RRE] */
2662 tmp32_1
= load_reg32(r1
);
2663 tmp32_2
= load_reg32(r2
);
2664 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
2665 store_reg32(r1
, tmp32_1
);
2666 tcg_temp_free_i32(tmp32_1
);
2667 tcg_temp_free_i32(tmp32_2
);
2669 case 0x54: /* MVPG R1,R2 [RRE] */
2671 tmp2
= load_reg(r1
);
2672 tmp3
= load_reg(r2
);
2673 potential_page_fault(s
);
2674 gen_helper_mvpg(tmp
, tmp2
, tmp3
);
2675 tcg_temp_free_i64(tmp
);
2676 tcg_temp_free_i64(tmp2
);
2677 tcg_temp_free_i64(tmp3
);
2678 /* XXX check CCO bit and set CC accordingly */
2679 gen_op_movi_cc(s
, 0);
2681 case 0x55: /* MVST R1,R2 [RRE] */
2682 tmp32_1
= load_reg32(0);
2683 tmp32_2
= tcg_const_i32(r1
);
2684 tmp32_3
= tcg_const_i32(r2
);
2685 potential_page_fault(s
);
2686 gen_helper_mvst(tmp32_1
, tmp32_2
, tmp32_3
);
2687 tcg_temp_free_i32(tmp32_1
);
2688 tcg_temp_free_i32(tmp32_2
);
2689 tcg_temp_free_i32(tmp32_3
);
2690 gen_op_movi_cc(s
, 1);
2692 case 0x5d: /* CLST R1,R2 [RRE] */
2693 tmp32_1
= load_reg32(0);
2694 tmp32_2
= tcg_const_i32(r1
);
2695 tmp32_3
= tcg_const_i32(r2
);
2696 potential_page_fault(s
);
2697 gen_helper_clst(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
2699 tcg_temp_free_i32(tmp32_1
);
2700 tcg_temp_free_i32(tmp32_2
);
2701 tcg_temp_free_i32(tmp32_3
);
2703 case 0x5e: /* SRST R1,R2 [RRE] */
2704 tmp32_1
= load_reg32(0);
2705 tmp32_2
= tcg_const_i32(r1
);
2706 tmp32_3
= tcg_const_i32(r2
);
2707 potential_page_fault(s
);
2708 gen_helper_srst(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
2710 tcg_temp_free_i32(tmp32_1
);
2711 tcg_temp_free_i32(tmp32_2
);
2712 tcg_temp_free_i32(tmp32_3
);
2715 #ifndef CONFIG_USER_ONLY
2716 case 0x02: /* STIDP D2(B2) [S] */
2718 check_privileged(s
, ilc
);
2719 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2720 tmp
= get_address(s
, 0, b2
, d2
);
2721 potential_page_fault(s
);
2722 gen_helper_stidp(tmp
);
2723 tcg_temp_free_i64(tmp
);
2725 case 0x04: /* SCK D2(B2) [S] */
2727 check_privileged(s
, ilc
);
2728 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2729 tmp
= get_address(s
, 0, b2
, d2
);
2730 potential_page_fault(s
);
2731 gen_helper_sck(cc_op
, tmp
);
2733 tcg_temp_free_i64(tmp
);
2735 case 0x05: /* STCK D2(B2) [S] */
2737 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2738 tmp
= get_address(s
, 0, b2
, d2
);
2739 potential_page_fault(s
);
2740 gen_helper_stck(cc_op
, tmp
);
2742 tcg_temp_free_i64(tmp
);
2744 case 0x06: /* SCKC D2(B2) [S] */
2745 /* Set Clock Comparator */
2746 check_privileged(s
, ilc
);
2747 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2748 tmp
= get_address(s
, 0, b2
, d2
);
2749 potential_page_fault(s
);
2750 gen_helper_sckc(tmp
);
2751 tcg_temp_free_i64(tmp
);
2753 case 0x07: /* STCKC D2(B2) [S] */
2754 /* Store Clock Comparator */
2755 check_privileged(s
, ilc
);
2756 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2757 tmp
= get_address(s
, 0, b2
, d2
);
2758 potential_page_fault(s
);
2759 gen_helper_stckc(tmp
);
2760 tcg_temp_free_i64(tmp
);
2762 case 0x08: /* SPT D2(B2) [S] */
2764 check_privileged(s
, ilc
);
2765 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2766 tmp
= get_address(s
, 0, b2
, d2
);
2767 potential_page_fault(s
);
2768 gen_helper_spt(tmp
);
2769 tcg_temp_free_i64(tmp
);
2771 case 0x09: /* STPT D2(B2) [S] */
2772 /* Store CPU Timer */
2773 check_privileged(s
, ilc
);
2774 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2775 tmp
= get_address(s
, 0, b2
, d2
);
2776 potential_page_fault(s
);
2777 gen_helper_stpt(tmp
);
2778 tcg_temp_free_i64(tmp
);
2780 case 0x0a: /* SPKA D2(B2) [S] */
2781 /* Set PSW Key from Address */
2782 check_privileged(s
, ilc
);
2783 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2784 tmp
= get_address(s
, 0, b2
, d2
);
2785 tmp2
= tcg_temp_new_i64();
2786 tcg_gen_andi_i64(tmp2
, psw_mask
, ~PSW_MASK_KEY
);
2787 tcg_gen_shli_i64(tmp
, tmp
, PSW_SHIFT_KEY
- 4);
2788 tcg_gen_or_i64(psw_mask
, tmp2
, tmp
);
2789 tcg_temp_free_i64(tmp2
);
2790 tcg_temp_free_i64(tmp
);
2792 case 0x0d: /* PTLB [S] */
2794 check_privileged(s
, ilc
);
2797 case 0x10: /* SPX D2(B2) [S] */
2798 /* Set Prefix Register */
2799 check_privileged(s
, ilc
);
2800 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2801 tmp
= get_address(s
, 0, b2
, d2
);
2802 potential_page_fault(s
);
2803 gen_helper_spx(tmp
);
2804 tcg_temp_free_i64(tmp
);
2806 case 0x11: /* STPX D2(B2) [S] */
2808 check_privileged(s
, ilc
);
2809 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2810 tmp
= get_address(s
, 0, b2
, d2
);
2811 tmp2
= tcg_temp_new_i64();
2812 tcg_gen_ld_i64(tmp2
, cpu_env
, offsetof(CPUState
, psa
));
2813 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2814 tcg_temp_free_i64(tmp
);
2815 tcg_temp_free_i64(tmp2
);
2817 case 0x12: /* STAP D2(B2) [S] */
2818 /* Store CPU Address */
2819 check_privileged(s
, ilc
);
2820 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2821 tmp
= get_address(s
, 0, b2
, d2
);
2822 tmp2
= tcg_temp_new_i64();
2823 tmp32_1
= tcg_temp_new_i32();
2824 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, cpu_num
));
2825 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
2826 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2827 tcg_temp_free_i64(tmp
);
2828 tcg_temp_free_i64(tmp2
);
2829 tcg_temp_free_i32(tmp32_1
);
2831 case 0x21: /* IPTE R1,R2 [RRE] */
2832 /* Invalidate PTE */
2833 check_privileged(s
, ilc
);
2834 r1
= (insn
>> 4) & 0xf;
2837 tmp2
= load_reg(r2
);
2838 gen_helper_ipte(tmp
, tmp2
);
2839 tcg_temp_free_i64(tmp
);
2840 tcg_temp_free_i64(tmp2
);
2842 case 0x29: /* ISKE R1,R2 [RRE] */
2843 /* Insert Storage Key Extended */
2844 check_privileged(s
, ilc
);
2845 r1
= (insn
>> 4) & 0xf;
2848 tmp2
= tcg_temp_new_i64();
2849 gen_helper_iske(tmp2
, tmp
);
2850 store_reg(r1
, tmp2
);
2851 tcg_temp_free_i64(tmp
);
2852 tcg_temp_free_i64(tmp2
);
2854 case 0x2a: /* RRBE R1,R2 [RRE] */
2855 /* Set Storage Key Extended */
2856 check_privileged(s
, ilc
);
2857 r1
= (insn
>> 4) & 0xf;
2859 tmp32_1
= load_reg32(r1
);
2861 gen_helper_rrbe(cc_op
, tmp32_1
, tmp
);
2863 tcg_temp_free_i32(tmp32_1
);
2864 tcg_temp_free_i64(tmp
);
2866 case 0x2b: /* SSKE R1,R2 [RRE] */
2867 /* Set Storage Key Extended */
2868 check_privileged(s
, ilc
);
2869 r1
= (insn
>> 4) & 0xf;
2871 tmp32_1
= load_reg32(r1
);
2873 gen_helper_sske(tmp32_1
, tmp
);
2874 tcg_temp_free_i32(tmp32_1
);
2875 tcg_temp_free_i64(tmp
);
2877 case 0x34: /* STCH ? */
2878 /* Store Subchannel */
2879 check_privileged(s
, ilc
);
2880 gen_op_movi_cc(s
, 3);
2882 case 0x46: /* STURA R1,R2 [RRE] */
2883 /* Store Using Real Address */
2884 check_privileged(s
, ilc
);
2885 r1
= (insn
>> 4) & 0xf;
2887 tmp32_1
= load_reg32(r1
);
2889 potential_page_fault(s
);
2890 gen_helper_stura(tmp
, tmp32_1
);
2891 tcg_temp_free_i32(tmp32_1
);
2892 tcg_temp_free_i64(tmp
);
2894 case 0x50: /* CSP R1,R2 [RRE] */
2895 /* Compare And Swap And Purge */
2896 check_privileged(s
, ilc
);
2897 r1
= (insn
>> 4) & 0xf;
2899 tmp32_1
= tcg_const_i32(r1
);
2900 tmp32_2
= tcg_const_i32(r2
);
2901 gen_helper_csp(cc_op
, tmp32_1
, tmp32_2
);
2903 tcg_temp_free_i32(tmp32_1
);
2904 tcg_temp_free_i32(tmp32_2
);
2906 case 0x5f: /* CHSC ? */
2907 /* Channel Subsystem Call */
2908 check_privileged(s
, ilc
);
2909 gen_op_movi_cc(s
, 3);
2911 case 0x78: /* STCKE D2(B2) [S] */
2912 /* Store Clock Extended */
2913 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2914 tmp
= get_address(s
, 0, b2
, d2
);
2915 potential_page_fault(s
);
2916 gen_helper_stcke(cc_op
, tmp
);
2918 tcg_temp_free_i64(tmp
);
2920 case 0x79: /* SACF D2(B2) [S] */
2921 /* Store Clock Extended */
2922 check_privileged(s
, ilc
);
2923 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2924 tmp
= get_address(s
, 0, b2
, d2
);
2925 potential_page_fault(s
);
2926 gen_helper_sacf(tmp
);
2927 tcg_temp_free_i64(tmp
);
2928 /* addressing mode has changed, so end the block */
2931 s
->is_jmp
= DISAS_EXCP
;
2933 case 0x7d: /* STSI D2,(B2) [S] */
2934 check_privileged(s
, ilc
);
2935 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2936 tmp
= get_address(s
, 0, b2
, d2
);
2937 tmp32_1
= load_reg32(0);
2938 tmp32_2
= load_reg32(1);
2939 potential_page_fault(s
);
2940 gen_helper_stsi(cc_op
, tmp
, tmp32_1
, tmp32_2
);
2942 tcg_temp_free_i64(tmp
);
2943 tcg_temp_free_i32(tmp32_1
);
2944 tcg_temp_free_i32(tmp32_2
);
2946 case 0x9d: /* LFPC D2(B2) [S] */
2947 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2948 tmp
= get_address(s
, 0, b2
, d2
);
2949 tmp2
= tcg_temp_new_i64();
2950 tmp32_1
= tcg_temp_new_i32();
2951 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2952 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
2953 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
2954 tcg_temp_free_i64(tmp
);
2955 tcg_temp_free_i64(tmp2
);
2956 tcg_temp_free_i32(tmp32_1
);
2958 case 0xb1: /* STFL D2(B2) [S] */
2959 /* Store Facility List (CPU features) at 200 */
2960 check_privileged(s
, ilc
);
2961 tmp2
= tcg_const_i64(0xc0000000);
2962 tmp
= tcg_const_i64(200);
2963 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2964 tcg_temp_free_i64(tmp2
);
2965 tcg_temp_free_i64(tmp
);
2967 case 0xb2: /* LPSWE D2(B2) [S] */
2968 /* Load PSW Extended */
2969 check_privileged(s
, ilc
);
2970 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2971 tmp
= get_address(s
, 0, b2
, d2
);
2972 tmp2
= tcg_temp_new_i64();
2973 tmp3
= tcg_temp_new_i64();
2974 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
2975 tcg_gen_addi_i64(tmp
, tmp
, 8);
2976 tcg_gen_qemu_ld64(tmp3
, tmp
, get_mem_index(s
));
2977 gen_helper_load_psw(tmp2
, tmp3
);
2978 /* we need to keep cc_op intact */
2979 s
->is_jmp
= DISAS_JUMP
;
2980 tcg_temp_free_i64(tmp
);
2981 tcg_temp_free_i64(tmp2
);
2982 tcg_temp_free_i64(tmp3
);
2984 case 0x20: /* SERVC R1,R2 [RRE] */
2985 /* SCLP Service call (PV hypercall) */
2986 check_privileged(s
, ilc
);
2987 potential_page_fault(s
);
2988 tmp32_1
= load_reg32(r2
);
2990 gen_helper_servc(cc_op
, tmp32_1
, tmp
);
2992 tcg_temp_free_i32(tmp32_1
);
2993 tcg_temp_free_i64(tmp
);
2997 LOG_DISAS("illegal b2 operation 0x%x\n", op
);
2998 gen_illegal_opcode(s
, ilc
);
3003 static void disas_b3(DisasContext
*s
, int op
, int m3
, int r1
, int r2
)
3006 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3007 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op
, m3
, r1
, r2
);
3008 #define FP_HELPER(i) \
3009 tmp32_1 = tcg_const_i32(r1); \
3010 tmp32_2 = tcg_const_i32(r2); \
3011 gen_helper_ ## i (tmp32_1, tmp32_2); \
3012 tcg_temp_free_i32(tmp32_1); \
3013 tcg_temp_free_i32(tmp32_2);
3015 #define FP_HELPER_CC(i) \
3016 tmp32_1 = tcg_const_i32(r1); \
3017 tmp32_2 = tcg_const_i32(r2); \
3018 gen_helper_ ## i (cc_op, tmp32_1, tmp32_2); \
3020 tcg_temp_free_i32(tmp32_1); \
3021 tcg_temp_free_i32(tmp32_2);
3024 case 0x0: /* LPEBR R1,R2 [RRE] */
3025 FP_HELPER_CC(lpebr
);
3027 case 0x2: /* LTEBR R1,R2 [RRE] */
3028 FP_HELPER_CC(ltebr
);
3030 case 0x3: /* LCEBR R1,R2 [RRE] */
3031 FP_HELPER_CC(lcebr
);
3033 case 0x4: /* LDEBR R1,R2 [RRE] */
3036 case 0x5: /* LXDBR R1,R2 [RRE] */
3039 case 0x9: /* CEBR R1,R2 [RRE] */
3042 case 0xa: /* AEBR R1,R2 [RRE] */
3045 case 0xb: /* SEBR R1,R2 [RRE] */
3048 case 0xd: /* DEBR R1,R2 [RRE] */
3051 case 0x10: /* LPDBR R1,R2 [RRE] */
3052 FP_HELPER_CC(lpdbr
);
3054 case 0x12: /* LTDBR R1,R2 [RRE] */
3055 FP_HELPER_CC(ltdbr
);
3057 case 0x13: /* LCDBR R1,R2 [RRE] */
3058 FP_HELPER_CC(lcdbr
);
3060 case 0x15: /* SQBDR R1,R2 [RRE] */
3063 case 0x17: /* MEEBR R1,R2 [RRE] */
3066 case 0x19: /* CDBR R1,R2 [RRE] */
3069 case 0x1a: /* ADBR R1,R2 [RRE] */
3072 case 0x1b: /* SDBR R1,R2 [RRE] */
3075 case 0x1c: /* MDBR R1,R2 [RRE] */
3078 case 0x1d: /* DDBR R1,R2 [RRE] */
3081 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3082 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3083 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3084 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3085 tmp32_1
= tcg_const_i32(m3
);
3086 tmp32_2
= tcg_const_i32(r2
);
3087 tmp32_3
= tcg_const_i32(r1
);
3090 gen_helper_maebr(tmp32_1
, tmp32_3
, tmp32_2
);
3093 gen_helper_madbr(tmp32_1
, tmp32_3
, tmp32_2
);
3096 gen_helper_msdbr(tmp32_1
, tmp32_3
, tmp32_2
);
3101 tcg_temp_free_i32(tmp32_1
);
3102 tcg_temp_free_i32(tmp32_2
);
3103 tcg_temp_free_i32(tmp32_3
);
3105 case 0x40: /* LPXBR R1,R2 [RRE] */
3106 FP_HELPER_CC(lpxbr
);
3108 case 0x42: /* LTXBR R1,R2 [RRE] */
3109 FP_HELPER_CC(ltxbr
);
3111 case 0x43: /* LCXBR R1,R2 [RRE] */
3112 FP_HELPER_CC(lcxbr
);
3114 case 0x44: /* LEDBR R1,R2 [RRE] */
3117 case 0x45: /* LDXBR R1,R2 [RRE] */
3120 case 0x46: /* LEXBR R1,R2 [RRE] */
3123 case 0x49: /* CXBR R1,R2 [RRE] */
3126 case 0x4a: /* AXBR R1,R2 [RRE] */
3129 case 0x4b: /* SXBR R1,R2 [RRE] */
3132 case 0x4c: /* MXBR R1,R2 [RRE] */
3135 case 0x4d: /* DXBR R1,R2 [RRE] */
3138 case 0x65: /* LXR R1,R2 [RRE] */
3139 tmp
= load_freg(r2
);
3140 store_freg(r1
, tmp
);
3141 tcg_temp_free_i64(tmp
);
3142 tmp
= load_freg(r2
+ 2);
3143 store_freg(r1
+ 2, tmp
);
3144 tcg_temp_free_i64(tmp
);
3146 case 0x74: /* LZER R1 [RRE] */
3147 tmp32_1
= tcg_const_i32(r1
);
3148 gen_helper_lzer(tmp32_1
);
3149 tcg_temp_free_i32(tmp32_1
);
3151 case 0x75: /* LZDR R1 [RRE] */
3152 tmp32_1
= tcg_const_i32(r1
);
3153 gen_helper_lzdr(tmp32_1
);
3154 tcg_temp_free_i32(tmp32_1
);
3156 case 0x76: /* LZXR R1 [RRE] */
3157 tmp32_1
= tcg_const_i32(r1
);
3158 gen_helper_lzxr(tmp32_1
);
3159 tcg_temp_free_i32(tmp32_1
);
3161 case 0x84: /* SFPC R1 [RRE] */
3162 tmp32_1
= load_reg32(r1
);
3163 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
3164 tcg_temp_free_i32(tmp32_1
);
3166 case 0x8c: /* EFPC R1 [RRE] */
3167 tmp32_1
= tcg_temp_new_i32();
3168 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
3169 store_reg32(r1
, tmp32_1
);
3170 tcg_temp_free_i32(tmp32_1
);
3172 case 0x94: /* CEFBR R1,R2 [RRE] */
3173 case 0x95: /* CDFBR R1,R2 [RRE] */
3174 case 0x96: /* CXFBR R1,R2 [RRE] */
3175 tmp32_1
= tcg_const_i32(r1
);
3176 tmp32_2
= load_reg32(r2
);
3179 gen_helper_cefbr(tmp32_1
, tmp32_2
);
3182 gen_helper_cdfbr(tmp32_1
, tmp32_2
);
3185 gen_helper_cxfbr(tmp32_1
, tmp32_2
);
3190 tcg_temp_free_i32(tmp32_1
);
3191 tcg_temp_free_i32(tmp32_2
);
3193 case 0x98: /* CFEBR R1,R2 [RRE] */
3194 case 0x99: /* CFDBR R1,R2 [RRE] */
3195 case 0x9a: /* CFXBR R1,R2 [RRE] */
3196 tmp32_1
= tcg_const_i32(r1
);
3197 tmp32_2
= tcg_const_i32(r2
);
3198 tmp32_3
= tcg_const_i32(m3
);
3201 gen_helper_cfebr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3204 gen_helper_cfdbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3207 gen_helper_cfxbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3213 tcg_temp_free_i32(tmp32_1
);
3214 tcg_temp_free_i32(tmp32_2
);
3215 tcg_temp_free_i32(tmp32_3
);
3217 case 0xa4: /* CEGBR R1,R2 [RRE] */
3218 case 0xa5: /* CDGBR R1,R2 [RRE] */
3219 tmp32_1
= tcg_const_i32(r1
);
3223 gen_helper_cegbr(tmp32_1
, tmp
);
3226 gen_helper_cdgbr(tmp32_1
, tmp
);
3231 tcg_temp_free_i32(tmp32_1
);
3232 tcg_temp_free_i64(tmp
);
3234 case 0xa6: /* CXGBR R1,R2 [RRE] */
3235 tmp32_1
= tcg_const_i32(r1
);
3237 gen_helper_cxgbr(tmp32_1
, tmp
);
3238 tcg_temp_free_i32(tmp32_1
);
3239 tcg_temp_free_i64(tmp
);
3241 case 0xa8: /* CGEBR R1,R2 [RRE] */
3242 tmp32_1
= tcg_const_i32(r1
);
3243 tmp32_2
= tcg_const_i32(r2
);
3244 tmp32_3
= tcg_const_i32(m3
);
3245 gen_helper_cgebr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3247 tcg_temp_free_i32(tmp32_1
);
3248 tcg_temp_free_i32(tmp32_2
);
3249 tcg_temp_free_i32(tmp32_3
);
3251 case 0xa9: /* CGDBR R1,R2 [RRE] */
3252 tmp32_1
= tcg_const_i32(r1
);
3253 tmp32_2
= tcg_const_i32(r2
);
3254 tmp32_3
= tcg_const_i32(m3
);
3255 gen_helper_cgdbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3257 tcg_temp_free_i32(tmp32_1
);
3258 tcg_temp_free_i32(tmp32_2
);
3259 tcg_temp_free_i32(tmp32_3
);
3261 case 0xaa: /* CGXBR R1,R2 [RRE] */
3262 tmp32_1
= tcg_const_i32(r1
);
3263 tmp32_2
= tcg_const_i32(r2
);
3264 tmp32_3
= tcg_const_i32(m3
);
3265 gen_helper_cgxbr(cc_op
, tmp32_1
, tmp32_2
, tmp32_3
);
3267 tcg_temp_free_i32(tmp32_1
);
3268 tcg_temp_free_i32(tmp32_2
);
3269 tcg_temp_free_i32(tmp32_3
);
3272 LOG_DISAS("illegal b3 operation 0x%x\n", op
);
3273 gen_illegal_opcode(s
, 2);
3281 static void disas_b9(DisasContext
*s
, int op
, int r1
, int r2
)
3283 TCGv_i64 tmp
, tmp2
, tmp3
;
3284 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3286 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
3288 case 0x0: /* LPGR R1,R2 [RRE] */
3289 case 0x1: /* LNGR R1,R2 [RRE] */
3290 case 0x2: /* LTGR R1,R2 [RRE] */
3291 case 0x3: /* LCGR R1,R2 [RRE] */
3292 case 0x10: /* LPGFR R1,R2 [RRE] */
3293 case 0x11: /* LNFGR R1,R2 [RRE] */
3294 case 0x12: /* LTGFR R1,R2 [RRE] */
3295 case 0x13: /* LCGFR R1,R2 [RRE] */
3297 tmp
= load_reg32_i64(r2
);
3302 case 0x0: /* LP?GR */
3303 set_cc_abs64(s
, tmp
);
3304 gen_helper_abs_i64(tmp
, tmp
);
3307 case 0x1: /* LN?GR */
3308 set_cc_nabs64(s
, tmp
);
3309 gen_helper_nabs_i64(tmp
, tmp
);
3312 case 0x2: /* LT?GR */
3318 case 0x3: /* LC?GR */
3319 tcg_gen_neg_i64(regs
[r1
], tmp
);
3320 set_cc_comp64(s
, regs
[r1
]);
3323 tcg_temp_free_i64(tmp
);
3325 case 0x4: /* LGR R1,R2 [RRE] */
3326 store_reg(r1
, regs
[r2
]);
3328 case 0x6: /* LGBR R1,R2 [RRE] */
3329 tmp2
= load_reg(r2
);
3330 tcg_gen_ext8s_i64(tmp2
, tmp2
);
3331 store_reg(r1
, tmp2
);
3332 tcg_temp_free_i64(tmp2
);
3334 case 0x8: /* AGR R1,R2 [RRE] */
3335 case 0xa: /* ALGR R1,R2 [RRE] */
3337 tmp2
= load_reg(r2
);
3338 tmp3
= tcg_temp_new_i64();
3339 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3340 store_reg(r1
, tmp3
);
3343 set_cc_add64(s
, tmp
, tmp2
, tmp3
);
3346 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3351 tcg_temp_free_i64(tmp
);
3352 tcg_temp_free_i64(tmp2
);
3353 tcg_temp_free_i64(tmp3
);
3355 case 0x9: /* SGR R1,R2 [RRE] */
3356 case 0xb: /* SLGR R1,R2 [RRE] */
3357 case 0x1b: /* SLGFR R1,R2 [RRE] */
3358 case 0x19: /* SGFR R1,R2 [RRE] */
3362 tmp32_1
= load_reg32(r2
);
3363 tmp2
= tcg_temp_new_i64();
3364 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3365 tcg_temp_free_i32(tmp32_1
);
3368 tmp32_1
= load_reg32(r2
);
3369 tmp2
= tcg_temp_new_i64();
3370 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3371 tcg_temp_free_i32(tmp32_1
);
3374 tmp2
= load_reg(r2
);
3377 tmp3
= tcg_temp_new_i64();
3378 tcg_gen_sub_i64(tmp3
, tmp
, tmp2
);
3379 store_reg(r1
, tmp3
);
3383 set_cc_sub64(s
, tmp
, tmp2
, tmp3
);
3387 set_cc_subu64(s
, tmp
, tmp2
, tmp3
);
3392 tcg_temp_free_i64(tmp
);
3393 tcg_temp_free_i64(tmp2
);
3394 tcg_temp_free_i64(tmp3
);
3396 case 0xc: /* MSGR R1,R2 [RRE] */
3397 case 0x1c: /* MSGFR R1,R2 [RRE] */
3399 tmp2
= load_reg(r2
);
3401 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3403 tcg_gen_mul_i64(tmp
, tmp
, tmp2
);
3405 tcg_temp_free_i64(tmp
);
3406 tcg_temp_free_i64(tmp2
);
3408 case 0xd: /* DSGR R1,R2 [RRE] */
3409 case 0x1d: /* DSGFR R1,R2 [RRE] */
3410 tmp
= load_reg(r1
+ 1);
3412 tmp2
= load_reg(r2
);
3414 tmp32_1
= load_reg32(r2
);
3415 tmp2
= tcg_temp_new_i64();
3416 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3417 tcg_temp_free_i32(tmp32_1
);
3419 tmp3
= tcg_temp_new_i64();
3420 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3421 store_reg(r1
+ 1, tmp3
);
3422 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3423 store_reg(r1
, tmp3
);
3424 tcg_temp_free_i64(tmp
);
3425 tcg_temp_free_i64(tmp2
);
3426 tcg_temp_free_i64(tmp3
);
3428 case 0x14: /* LGFR R1,R2 [RRE] */
3429 tmp32_1
= load_reg32(r2
);
3430 tmp
= tcg_temp_new_i64();
3431 tcg_gen_ext_i32_i64(tmp
, tmp32_1
);
3433 tcg_temp_free_i32(tmp32_1
);
3434 tcg_temp_free_i64(tmp
);
3436 case 0x16: /* LLGFR R1,R2 [RRE] */
3437 tmp32_1
= load_reg32(r2
);
3438 tmp
= tcg_temp_new_i64();
3439 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3441 tcg_temp_free_i32(tmp32_1
);
3442 tcg_temp_free_i64(tmp
);
3444 case 0x17: /* LLGTR R1,R2 [RRE] */
3445 tmp32_1
= load_reg32(r2
);
3446 tmp
= tcg_temp_new_i64();
3447 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0x7fffffffUL
);
3448 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3450 tcg_temp_free_i32(tmp32_1
);
3451 tcg_temp_free_i64(tmp
);
3453 case 0x18: /* AGFR R1,R2 [RRE] */
3454 case 0x1a: /* ALGFR R1,R2 [RRE] */
3455 tmp32_1
= load_reg32(r2
);
3456 tmp2
= tcg_temp_new_i64();
3458 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3460 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3462 tcg_temp_free_i32(tmp32_1
);
3464 tmp3
= tcg_temp_new_i64();
3465 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3466 store_reg(r1
, tmp3
);
3468 set_cc_add64(s
, tmp
, tmp2
, tmp3
);
3470 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3472 tcg_temp_free_i64(tmp
);
3473 tcg_temp_free_i64(tmp2
);
3474 tcg_temp_free_i64(tmp3
);
3476 case 0x1f: /* LRVR R1,R2 [RRE] */
3477 tmp32_1
= load_reg32(r2
);
3478 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
3479 store_reg32(r1
, tmp32_1
);
3480 tcg_temp_free_i32(tmp32_1
);
3482 case 0x20: /* CGR R1,R2 [RRE] */
3483 case 0x30: /* CGFR R1,R2 [RRE] */
3484 tmp2
= load_reg(r2
);
3486 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3489 cmp_s64(s
, tmp
, tmp2
);
3490 tcg_temp_free_i64(tmp
);
3491 tcg_temp_free_i64(tmp2
);
3493 case 0x21: /* CLGR R1,R2 [RRE] */
3494 case 0x31: /* CLGFR R1,R2 [RRE] */
3495 tmp2
= load_reg(r2
);
3497 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3500 cmp_u64(s
, tmp
, tmp2
);
3501 tcg_temp_free_i64(tmp
);
3502 tcg_temp_free_i64(tmp2
);
3504 case 0x26: /* LBR R1,R2 [RRE] */
3505 tmp32_1
= load_reg32(r2
);
3506 tcg_gen_ext8s_i32(tmp32_1
, tmp32_1
);
3507 store_reg32(r1
, tmp32_1
);
3508 tcg_temp_free_i32(tmp32_1
);
3510 case 0x27: /* LHR R1,R2 [RRE] */
3511 tmp32_1
= load_reg32(r2
);
3512 tcg_gen_ext16s_i32(tmp32_1
, tmp32_1
);
3513 store_reg32(r1
, tmp32_1
);
3514 tcg_temp_free_i32(tmp32_1
);
3516 case 0x80: /* NGR R1,R2 [RRE] */
3517 case 0x81: /* OGR R1,R2 [RRE] */
3518 case 0x82: /* XGR R1,R2 [RRE] */
3520 tmp2
= load_reg(r2
);
3523 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
3526 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3529 tcg_gen_xor_i64(tmp
, tmp
, tmp2
);
3535 set_cc_nz_u64(s
, tmp
);
3536 tcg_temp_free_i64(tmp
);
3537 tcg_temp_free_i64(tmp2
);
3539 case 0x83: /* FLOGR R1,R2 [RRE] */
3541 tmp32_1
= tcg_const_i32(r1
);
3542 gen_helper_flogr(cc_op
, tmp32_1
, tmp
);
3544 tcg_temp_free_i64(tmp
);
3545 tcg_temp_free_i32(tmp32_1
);
3547 case 0x84: /* LLGCR R1,R2 [RRE] */
3549 tcg_gen_andi_i64(tmp
, tmp
, 0xff);
3551 tcg_temp_free_i64(tmp
);
3553 case 0x85: /* LLGHR R1,R2 [RRE] */
3555 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
3557 tcg_temp_free_i64(tmp
);
3559 case 0x87: /* DLGR R1,R2 [RRE] */
3560 tmp32_1
= tcg_const_i32(r1
);
3562 gen_helper_dlg(tmp32_1
, tmp
);
3563 tcg_temp_free_i64(tmp
);
3564 tcg_temp_free_i32(tmp32_1
);
3566 case 0x88: /* ALCGR R1,R2 [RRE] */
3568 tmp2
= load_reg(r2
);
3569 tmp3
= tcg_temp_new_i64();
3571 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
3572 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
3573 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
3574 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
3575 tcg_gen_add_i64(tmp3
, tmp
, tmp3
);
3576 store_reg(r1
, tmp3
);
3577 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3578 tcg_temp_free_i64(tmp
);
3579 tcg_temp_free_i64(tmp2
);
3580 tcg_temp_free_i64(tmp3
);
3582 case 0x89: /* SLBGR R1,R2 [RRE] */
3584 tmp2
= load_reg(r2
);
3585 tmp32_1
= tcg_const_i32(r1
);
3587 gen_helper_slbg(cc_op
, cc_op
, tmp32_1
, tmp
, tmp2
);
3589 tcg_temp_free_i64(tmp
);
3590 tcg_temp_free_i64(tmp2
);
3591 tcg_temp_free_i32(tmp32_1
);
3593 case 0x94: /* LLCR R1,R2 [RRE] */
3594 tmp32_1
= load_reg32(r2
);
3595 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xff);
3596 store_reg32(r1
, tmp32_1
);
3597 tcg_temp_free_i32(tmp32_1
);
3599 case 0x95: /* LLHR R1,R2 [RRE] */
3600 tmp32_1
= load_reg32(r2
);
3601 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xffff);
3602 store_reg32(r1
, tmp32_1
);
3603 tcg_temp_free_i32(tmp32_1
);
3605 case 0x96: /* MLR R1,R2 [RRE] */
3606 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3607 tmp2
= load_reg(r2
);
3608 tmp3
= load_reg((r1
+ 1) & 15);
3609 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3610 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3611 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3612 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3613 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3614 store_reg32_i64(r1
, tmp2
);
3615 tcg_temp_free_i64(tmp2
);
3616 tcg_temp_free_i64(tmp3
);
3618 case 0x97: /* DLR R1,R2 [RRE] */
3619 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3620 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3622 tmp2
= load_reg(r2
);
3623 tmp3
= load_reg((r1
+ 1) & 15);
3624 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3625 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3626 tcg_gen_shli_i64(tmp
, tmp
, 32);
3627 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
3629 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3630 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
3631 store_reg32_i64((r1
+ 1) & 15, tmp
);
3632 store_reg32_i64(r1
, tmp3
);
3633 tcg_temp_free_i64(tmp
);
3634 tcg_temp_free_i64(tmp2
);
3635 tcg_temp_free_i64(tmp3
);
3637 case 0x98: /* ALCR R1,R2 [RRE] */
3638 tmp32_1
= load_reg32(r1
);
3639 tmp32_2
= load_reg32(r2
);
3640 tmp32_3
= tcg_temp_new_i32();
3641 /* XXX possible optimization point */
3643 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
3644 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3645 store_reg32(r1
, tmp32_3
);
3646 tcg_temp_free_i32(tmp32_1
);
3647 tcg_temp_free_i32(tmp32_2
);
3648 tcg_temp_free_i32(tmp32_3
);
3650 case 0x99: /* SLBR R1,R2 [RRE] */
3651 tmp32_1
= load_reg32(r2
);
3652 tmp32_2
= tcg_const_i32(r1
);
3654 gen_helper_slb(cc_op
, cc_op
, tmp32_2
, tmp32_1
);
3656 tcg_temp_free_i32(tmp32_1
);
3657 tcg_temp_free_i32(tmp32_2
);
3660 LOG_DISAS("illegal b9 operation 0x%x\n", op
);
3661 gen_illegal_opcode(s
, 2);
3666 static void disas_c0(DisasContext
*s
, int op
, int r1
, int i2
)
3669 TCGv_i32 tmp32_1
, tmp32_2
;
3670 uint64_t target
= s
->pc
+ i2
* 2LL;
3673 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op
, r1
, i2
);
3676 case 0: /* larl r1, i2 */
3677 tmp
= tcg_const_i64(target
);
3679 tcg_temp_free_i64(tmp
);
3681 case 0x1: /* LGFI R1,I2 [RIL] */
3682 tmp
= tcg_const_i64((int64_t)i2
);
3684 tcg_temp_free_i64(tmp
);
3686 case 0x4: /* BRCL M1,I2 [RIL] */
3687 /* m1 & (1 << (3 - cc)) */
3688 tmp32_1
= tcg_const_i32(3);
3689 tmp32_2
= tcg_const_i32(1);
3691 tcg_gen_sub_i32(tmp32_1
, tmp32_1
, cc_op
);
3692 tcg_gen_shl_i32(tmp32_2
, tmp32_2
, tmp32_1
);
3693 tcg_temp_free_i32(tmp32_1
);
3694 tmp32_1
= tcg_const_i32(r1
); /* m1 == r1 */
3695 tcg_gen_and_i32(tmp32_1
, tmp32_1
, tmp32_2
);
3696 l1
= gen_new_label();
3697 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
3698 gen_goto_tb(s
, 0, target
);
3700 gen_goto_tb(s
, 1, s
->pc
+ 6);
3701 s
->is_jmp
= DISAS_TB_JUMP
;
3702 tcg_temp_free_i32(tmp32_1
);
3703 tcg_temp_free_i32(tmp32_2
);
3705 case 0x5: /* brasl r1, i2 */
3706 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 6));
3708 tcg_temp_free_i64(tmp
);
3709 gen_goto_tb(s
, 0, target
);
3710 s
->is_jmp
= DISAS_TB_JUMP
;
3712 case 0x7: /* XILF R1,I2 [RIL] */
3713 case 0xb: /* NILF R1,I2 [RIL] */
3714 case 0xd: /* OILF R1,I2 [RIL] */
3715 tmp32_1
= load_reg32(r1
);
3718 tcg_gen_xori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3721 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3724 tcg_gen_ori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3729 store_reg32(r1
, tmp32_1
);
3730 set_cc_nz_u32(s
, tmp32_1
);
3731 tcg_temp_free_i32(tmp32_1
);
3733 case 0x9: /* IILF R1,I2 [RIL] */
3734 tmp32_1
= tcg_const_i32((uint32_t)i2
);
3735 store_reg32(r1
, tmp32_1
);
3736 tcg_temp_free_i32(tmp32_1
);
3738 case 0xa: /* NIHF R1,I2 [RIL] */
3740 tmp32_1
= tcg_temp_new_i32();
3741 tcg_gen_andi_i64(tmp
, tmp
, (((uint64_t)((uint32_t)i2
)) << 32)
3744 tcg_gen_shri_i64(tmp
, tmp
, 32);
3745 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3746 set_cc_nz_u32(s
, tmp32_1
);
3747 tcg_temp_free_i64(tmp
);
3748 tcg_temp_free_i32(tmp32_1
);
3750 case 0xe: /* LLIHF R1,I2 [RIL] */
3751 tmp
= tcg_const_i64(((uint64_t)(uint32_t)i2
) << 32);
3753 tcg_temp_free_i64(tmp
);
3755 case 0xf: /* LLILF R1,I2 [RIL] */
3756 tmp
= tcg_const_i64((uint32_t)i2
);
3758 tcg_temp_free_i64(tmp
);
3761 LOG_DISAS("illegal c0 operation 0x%x\n", op
);
3762 gen_illegal_opcode(s
, 3);
3767 static void disas_c2(DisasContext
*s
, int op
, int r1
, int i2
)
3769 TCGv_i64 tmp
, tmp2
, tmp3
;
3770 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3773 case 0x4: /* SLGFI R1,I2 [RIL] */
3774 case 0xa: /* ALGFI R1,I2 [RIL] */
3776 tmp2
= tcg_const_i64((uint64_t)(uint32_t)i2
);
3777 tmp3
= tcg_temp_new_i64();
3780 tcg_gen_sub_i64(tmp3
, tmp
, tmp2
);
3781 set_cc_subu64(s
, tmp
, tmp2
, tmp3
);
3784 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3785 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3790 store_reg(r1
, tmp3
);
3791 tcg_temp_free_i64(tmp
);
3792 tcg_temp_free_i64(tmp2
);
3793 tcg_temp_free_i64(tmp3
);
3795 case 0x5: /* SLFI R1,I2 [RIL] */
3796 case 0xb: /* ALFI R1,I2 [RIL] */
3797 tmp32_1
= load_reg32(r1
);
3798 tmp32_2
= tcg_const_i32(i2
);
3799 tmp32_3
= tcg_temp_new_i32();
3802 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3803 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3806 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3807 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3812 store_reg32(r1
, tmp32_3
);
3813 tcg_temp_free_i32(tmp32_1
);
3814 tcg_temp_free_i32(tmp32_2
);
3815 tcg_temp_free_i32(tmp32_3
);
3817 case 0xc: /* CGFI R1,I2 [RIL] */
3819 cmp_s64c(s
, tmp
, (int64_t)i2
);
3820 tcg_temp_free_i64(tmp
);
3822 case 0xe: /* CLGFI R1,I2 [RIL] */
3824 cmp_u64c(s
, tmp
, (uint64_t)(uint32_t)i2
);
3825 tcg_temp_free_i64(tmp
);
3827 case 0xd: /* CFI R1,I2 [RIL] */
3828 tmp32_1
= load_reg32(r1
);
3829 cmp_s32c(s
, tmp32_1
, i2
);
3830 tcg_temp_free_i32(tmp32_1
);
3832 case 0xf: /* CLFI R1,I2 [RIL] */
3833 tmp32_1
= load_reg32(r1
);
3834 cmp_u32c(s
, tmp32_1
, i2
);
3835 tcg_temp_free_i32(tmp32_1
);
3838 LOG_DISAS("illegal c2 operation 0x%x\n", op
);
3839 gen_illegal_opcode(s
, 3);
3844 static void gen_and_or_xor_i32(int opc
, TCGv_i32 tmp
, TCGv_i32 tmp2
)
3846 switch (opc
& 0xf) {
3848 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
3851 tcg_gen_or_i32(tmp
, tmp
, tmp2
);
3854 tcg_gen_xor_i32(tmp
, tmp
, tmp2
);
3861 static void disas_s390_insn(DisasContext
*s
)
3863 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
3864 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
, tmp32_4
;
3867 int op
, r1
, r2
, r3
, d1
, d2
, x2
, b1
, b2
, i
, i2
, r1b
;
3872 opc
= ldub_code(s
->pc
);
3873 LOG_DISAS("opc 0x%x\n", opc
);
3878 #ifndef CONFIG_USER_ONLY
3879 case 0x01: /* SAM */
3880 insn
= ld_code2(s
->pc
);
3881 /* set addressing mode, but we only do 64bit anyways */
3884 case 0x6: /* BCTR R1,R2 [RR] */
3885 insn
= ld_code2(s
->pc
);
3886 decode_rr(s
, insn
, &r1
, &r2
);
3887 tmp32_1
= load_reg32(r1
);
3888 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3889 store_reg32(r1
, tmp32_1
);
3892 gen_update_cc_op(s
);
3893 l1
= gen_new_label();
3894 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3896 /* not taking the branch, jump to after the instruction */
3897 gen_goto_tb(s
, 0, s
->pc
+ 2);
3900 /* take the branch, move R2 into psw.addr */
3901 tmp32_1
= load_reg32(r2
);
3902 tmp
= tcg_temp_new_i64();
3903 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3904 tcg_gen_mov_i64(psw_addr
, tmp
);
3905 s
->is_jmp
= DISAS_JUMP
;
3906 tcg_temp_free_i32(tmp32_1
);
3907 tcg_temp_free_i64(tmp
);
3910 case 0x7: /* BCR M1,R2 [RR] */
3911 insn
= ld_code2(s
->pc
);
3912 decode_rr(s
, insn
, &r1
, &r2
);
3915 gen_bcr(s
, r1
, tmp
, s
->pc
);
3916 tcg_temp_free_i64(tmp
);
3917 s
->is_jmp
= DISAS_TB_JUMP
;
3919 /* XXX: "serialization and checkpoint-synchronization function"? */
3922 case 0xa: /* SVC I [RR] */
3923 insn
= ld_code2(s
->pc
);
3928 tmp32_1
= tcg_const_i32(i
);
3929 tmp32_2
= tcg_const_i32(ilc
* 2);
3930 tmp32_3
= tcg_const_i32(EXCP_SVC
);
3931 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, int_svc_code
));
3932 tcg_gen_st_i32(tmp32_2
, cpu_env
, offsetof(CPUState
, int_svc_ilc
));
3933 gen_helper_exception(tmp32_3
);
3934 s
->is_jmp
= DISAS_EXCP
;
3935 tcg_temp_free_i32(tmp32_1
);
3936 tcg_temp_free_i32(tmp32_2
);
3937 tcg_temp_free_i32(tmp32_3
);
3939 case 0xd: /* BASR R1,R2 [RR] */
3940 insn
= ld_code2(s
->pc
);
3941 decode_rr(s
, insn
, &r1
, &r2
);
3942 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 2));
3945 tmp2
= load_reg(r2
);
3946 tcg_gen_mov_i64(psw_addr
, tmp2
);
3947 tcg_temp_free_i64(tmp2
);
3948 s
->is_jmp
= DISAS_JUMP
;
3950 tcg_temp_free_i64(tmp
);
3952 case 0xe: /* MVCL R1,R2 [RR] */
3953 insn
= ld_code2(s
->pc
);
3954 decode_rr(s
, insn
, &r1
, &r2
);
3955 tmp32_1
= tcg_const_i32(r1
);
3956 tmp32_2
= tcg_const_i32(r2
);
3957 potential_page_fault(s
);
3958 gen_helper_mvcl(cc_op
, tmp32_1
, tmp32_2
);
3960 tcg_temp_free_i32(tmp32_1
);
3961 tcg_temp_free_i32(tmp32_2
);
3963 case 0x10: /* LPR R1,R2 [RR] */
3964 insn
= ld_code2(s
->pc
);
3965 decode_rr(s
, insn
, &r1
, &r2
);
3966 tmp32_1
= load_reg32(r2
);
3967 set_cc_abs32(s
, tmp32_1
);
3968 gen_helper_abs_i32(tmp32_1
, tmp32_1
);
3969 store_reg32(r1
, tmp32_1
);
3970 tcg_temp_free_i32(tmp32_1
);
3972 case 0x11: /* LNR R1,R2 [RR] */
3973 insn
= ld_code2(s
->pc
);
3974 decode_rr(s
, insn
, &r1
, &r2
);
3975 tmp32_1
= load_reg32(r2
);
3976 set_cc_nabs32(s
, tmp32_1
);
3977 gen_helper_nabs_i32(tmp32_1
, tmp32_1
);
3978 store_reg32(r1
, tmp32_1
);
3979 tcg_temp_free_i32(tmp32_1
);
3981 case 0x12: /* LTR R1,R2 [RR] */
3982 insn
= ld_code2(s
->pc
);
3983 decode_rr(s
, insn
, &r1
, &r2
);
3984 tmp32_1
= load_reg32(r2
);
3986 store_reg32(r1
, tmp32_1
);
3988 set_cc_s32(s
, tmp32_1
);
3989 tcg_temp_free_i32(tmp32_1
);
3991 case 0x13: /* LCR R1,R2 [RR] */
3992 insn
= ld_code2(s
->pc
);
3993 decode_rr(s
, insn
, &r1
, &r2
);
3994 tmp32_1
= load_reg32(r2
);
3995 tcg_gen_neg_i32(tmp32_1
, tmp32_1
);
3996 store_reg32(r1
, tmp32_1
);
3997 set_cc_comp32(s
, tmp32_1
);
3998 tcg_temp_free_i32(tmp32_1
);
4000 case 0x14: /* NR R1,R2 [RR] */
4001 case 0x16: /* OR R1,R2 [RR] */
4002 case 0x17: /* XR R1,R2 [RR] */
4003 insn
= ld_code2(s
->pc
);
4004 decode_rr(s
, insn
, &r1
, &r2
);
4005 tmp32_2
= load_reg32(r2
);
4006 tmp32_1
= load_reg32(r1
);
4007 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
4008 store_reg32(r1
, tmp32_1
);
4009 set_cc_nz_u32(s
, tmp32_1
);
4010 tcg_temp_free_i32(tmp32_1
);
4011 tcg_temp_free_i32(tmp32_2
);
4013 case 0x18: /* LR R1,R2 [RR] */
4014 insn
= ld_code2(s
->pc
);
4015 decode_rr(s
, insn
, &r1
, &r2
);
4016 tmp32_1
= load_reg32(r2
);
4017 store_reg32(r1
, tmp32_1
);
4018 tcg_temp_free_i32(tmp32_1
);
4020 case 0x15: /* CLR R1,R2 [RR] */
4021 case 0x19: /* CR R1,R2 [RR] */
4022 insn
= ld_code2(s
->pc
);
4023 decode_rr(s
, insn
, &r1
, &r2
);
4024 tmp32_1
= load_reg32(r1
);
4025 tmp32_2
= load_reg32(r2
);
4027 cmp_u32(s
, tmp32_1
, tmp32_2
);
4029 cmp_s32(s
, tmp32_1
, tmp32_2
);
4031 tcg_temp_free_i32(tmp32_1
);
4032 tcg_temp_free_i32(tmp32_2
);
4034 case 0x1a: /* AR R1,R2 [RR] */
4035 case 0x1e: /* ALR R1,R2 [RR] */
4036 insn
= ld_code2(s
->pc
);
4037 decode_rr(s
, insn
, &r1
, &r2
);
4038 tmp32_1
= load_reg32(r1
);
4039 tmp32_2
= load_reg32(r2
);
4040 tmp32_3
= tcg_temp_new_i32();
4041 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4042 store_reg32(r1
, tmp32_3
);
4044 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4046 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4048 tcg_temp_free_i32(tmp32_1
);
4049 tcg_temp_free_i32(tmp32_2
);
4050 tcg_temp_free_i32(tmp32_3
);
4052 case 0x1b: /* SR R1,R2 [RR] */
4053 case 0x1f: /* SLR R1,R2 [RR] */
4054 insn
= ld_code2(s
->pc
);
4055 decode_rr(s
, insn
, &r1
, &r2
);
4056 tmp32_1
= load_reg32(r1
);
4057 tmp32_2
= load_reg32(r2
);
4058 tmp32_3
= tcg_temp_new_i32();
4059 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4060 store_reg32(r1
, tmp32_3
);
4062 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4064 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4066 tcg_temp_free_i32(tmp32_1
);
4067 tcg_temp_free_i32(tmp32_2
);
4068 tcg_temp_free_i32(tmp32_3
);
4070 case 0x1c: /* MR R1,R2 [RR] */
4071 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4072 insn
= ld_code2(s
->pc
);
4073 decode_rr(s
, insn
, &r1
, &r2
);
4074 tmp2
= load_reg(r2
);
4075 tmp3
= load_reg((r1
+ 1) & 15);
4076 tcg_gen_ext32s_i64(tmp2
, tmp2
);
4077 tcg_gen_ext32s_i64(tmp3
, tmp3
);
4078 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
4079 store_reg32_i64((r1
+ 1) & 15, tmp2
);
4080 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
4081 store_reg32_i64(r1
, tmp2
);
4082 tcg_temp_free_i64(tmp2
);
4083 tcg_temp_free_i64(tmp3
);
4085 case 0x1d: /* DR R1,R2 [RR] */
4086 insn
= ld_code2(s
->pc
);
4087 decode_rr(s
, insn
, &r1
, &r2
);
4088 tmp32_1
= load_reg32(r1
);
4089 tmp32_2
= load_reg32(r1
+ 1);
4090 tmp32_3
= load_reg32(r2
);
4092 tmp
= tcg_temp_new_i64(); /* dividend */
4093 tmp2
= tcg_temp_new_i64(); /* divisor */
4094 tmp3
= tcg_temp_new_i64();
4096 /* dividend is r(r1 << 32) | r(r1 + 1) */
4097 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4098 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
4099 tcg_gen_shli_i64(tmp
, tmp
, 32);
4100 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
4102 /* divisor is r(r2) */
4103 tcg_gen_ext_i32_i64(tmp2
, tmp32_3
);
4105 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
4106 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
4108 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4109 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
4111 store_reg32(r1
, tmp32_1
); /* remainder */
4112 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
4113 tcg_temp_free_i32(tmp32_1
);
4114 tcg_temp_free_i32(tmp32_2
);
4115 tcg_temp_free_i32(tmp32_3
);
4116 tcg_temp_free_i64(tmp
);
4117 tcg_temp_free_i64(tmp2
);
4118 tcg_temp_free_i64(tmp3
);
4120 case 0x28: /* LDR R1,R2 [RR] */
4121 insn
= ld_code2(s
->pc
);
4122 decode_rr(s
, insn
, &r1
, &r2
);
4123 tmp
= load_freg(r2
);
4124 store_freg(r1
, tmp
);
4125 tcg_temp_free_i64(tmp
);
4127 case 0x38: /* LER R1,R2 [RR] */
4128 insn
= ld_code2(s
->pc
);
4129 decode_rr(s
, insn
, &r1
, &r2
);
4130 tmp32_1
= load_freg32(r2
);
4131 store_freg32(r1
, tmp32_1
);
4132 tcg_temp_free_i32(tmp32_1
);
4134 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4135 insn
= ld_code4(s
->pc
);
4136 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4137 tmp2
= load_reg(r1
);
4138 tcg_gen_qemu_st16(tmp2
, tmp
, get_mem_index(s
));
4139 tcg_temp_free_i64(tmp
);
4140 tcg_temp_free_i64(tmp2
);
4143 insn
= ld_code4(s
->pc
);
4144 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4145 store_reg(r1
, tmp
); /* FIXME: 31/24-bit addressing */
4146 tcg_temp_free_i64(tmp
);
4148 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4149 insn
= ld_code4(s
->pc
);
4150 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4151 tmp2
= load_reg(r1
);
4152 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4153 tcg_temp_free_i64(tmp
);
4154 tcg_temp_free_i64(tmp2
);
4156 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4157 insn
= ld_code4(s
->pc
);
4158 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4159 tmp2
= tcg_temp_new_i64();
4160 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4161 store_reg8(r1
, tmp2
);
4162 tcg_temp_free_i64(tmp
);
4163 tcg_temp_free_i64(tmp2
);
4165 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4166 insn
= ld_code4(s
->pc
);
4167 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4168 tmp2
= load_reg(r1
);
4169 tmp3
= tcg_const_i64(s
->pc
+ 4);
4172 gen_helper_ex(cc_op
, cc_op
, tmp2
, tmp
, tmp3
);
4174 tcg_temp_free_i64(tmp
);
4175 tcg_temp_free_i64(tmp2
);
4176 tcg_temp_free_i64(tmp3
);
4178 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4179 insn
= ld_code4(s
->pc
);
4180 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4181 tcg_temp_free_i64(tmp
);
4183 tmp32_1
= load_reg32(r1
);
4184 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
4185 store_reg32(r1
, tmp32_1
);
4187 gen_update_cc_op(s
);
4188 l1
= gen_new_label();
4189 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
4191 /* not taking the branch, jump to after the instruction */
4192 gen_goto_tb(s
, 0, s
->pc
+ 4);
4195 /* take the branch, move R2 into psw.addr */
4196 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4197 tcg_gen_mov_i64(psw_addr
, tmp
);
4198 s
->is_jmp
= DISAS_JUMP
;
4199 tcg_temp_free_i32(tmp32_1
);
4200 tcg_temp_free_i64(tmp
);
4202 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4203 insn
= ld_code4(s
->pc
);
4204 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4205 gen_bcr(s
, r1
, tmp
, s
->pc
+ 4);
4206 tcg_temp_free_i64(tmp
);
4207 s
->is_jmp
= DISAS_TB_JUMP
;
4209 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4210 insn
= ld_code4(s
->pc
);
4211 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4212 tmp2
= tcg_temp_new_i64();
4213 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4214 store_reg32_i64(r1
, tmp2
);
4215 tcg_temp_free_i64(tmp
);
4216 tcg_temp_free_i64(tmp2
);
4218 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4219 insn
= ld_code4(s
->pc
);
4220 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4221 tmp32_1
= load_reg32(r1
);
4222 tmp32_2
= tcg_temp_new_i32();
4223 tmp2
= tcg_temp_new_i64();
4224 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4225 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4226 cmp_s32(s
, tmp32_1
, tmp32_2
);
4227 tcg_temp_free_i32(tmp32_1
);
4228 tcg_temp_free_i32(tmp32_2
);
4229 tcg_temp_free_i64(tmp
);
4230 tcg_temp_free_i64(tmp2
);
4232 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4233 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4234 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4235 insn
= ld_code4(s
->pc
);
4236 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4237 tmp2
= tcg_temp_new_i64();
4238 tmp32_1
= load_reg32(r1
);
4239 tmp32_2
= tcg_temp_new_i32();
4240 tmp32_3
= tcg_temp_new_i32();
4242 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4243 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4246 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4247 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4250 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4251 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4254 tcg_gen_mul_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4259 store_reg32(r1
, tmp32_3
);
4261 tcg_temp_free_i32(tmp32_1
);
4262 tcg_temp_free_i32(tmp32_2
);
4263 tcg_temp_free_i32(tmp32_3
);
4264 tcg_temp_free_i64(tmp
);
4265 tcg_temp_free_i64(tmp2
);
4267 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4268 insn
= ld_code4(s
->pc
);
4269 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4270 tmp2
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
4271 store_reg(r1
, tmp2
);
4272 tcg_gen_mov_i64(psw_addr
, tmp
);
4273 tcg_temp_free_i64(tmp
);
4274 tcg_temp_free_i64(tmp2
);
4275 s
->is_jmp
= DISAS_JUMP
;
4277 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4278 insn
= ld_code4(s
->pc
);
4279 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4280 tmp2
= tcg_temp_new_i64();
4281 tmp32_1
= tcg_temp_new_i32();
4282 tcg_gen_trunc_i64_i32(tmp32_1
, regs
[r1
]);
4283 gen_helper_cvd(tmp2
, tmp32_1
);
4284 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4285 tcg_temp_free_i64(tmp
);
4286 tcg_temp_free_i64(tmp2
);
4287 tcg_temp_free_i32(tmp32_1
);
4289 case 0x50: /* st r1, d2(x2, b2) */
4290 insn
= ld_code4(s
->pc
);
4291 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4292 tmp2
= load_reg(r1
);
4293 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4294 tcg_temp_free_i64(tmp
);
4295 tcg_temp_free_i64(tmp2
);
4297 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4298 insn
= ld_code4(s
->pc
);
4299 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4300 tmp2
= tcg_temp_new_i64();
4301 tmp32_1
= tcg_temp_new_i32();
4302 tmp32_2
= load_reg32(r1
);
4303 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4304 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4305 cmp_u32(s
, tmp32_2
, tmp32_1
);
4306 tcg_temp_free_i64(tmp
);
4307 tcg_temp_free_i64(tmp2
);
4308 tcg_temp_free_i32(tmp32_1
);
4309 tcg_temp_free_i32(tmp32_2
);
4311 case 0x54: /* N R1,D2(X2,B2) [RX] */
4312 case 0x56: /* O R1,D2(X2,B2) [RX] */
4313 case 0x57: /* X R1,D2(X2,B2) [RX] */
4314 insn
= ld_code4(s
->pc
);
4315 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4316 tmp2
= tcg_temp_new_i64();
4317 tmp32_1
= load_reg32(r1
);
4318 tmp32_2
= tcg_temp_new_i32();
4319 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4320 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4321 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
4322 store_reg32(r1
, tmp32_1
);
4323 set_cc_nz_u32(s
, tmp32_1
);
4324 tcg_temp_free_i64(tmp
);
4325 tcg_temp_free_i64(tmp2
);
4326 tcg_temp_free_i32(tmp32_1
);
4327 tcg_temp_free_i32(tmp32_2
);
4329 case 0x58: /* l r1, d2(x2, b2) */
4330 insn
= ld_code4(s
->pc
);
4331 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4332 tmp2
= tcg_temp_new_i64();
4333 tmp32_1
= tcg_temp_new_i32();
4334 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4335 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4336 store_reg32(r1
, tmp32_1
);
4337 tcg_temp_free_i64(tmp
);
4338 tcg_temp_free_i64(tmp2
);
4339 tcg_temp_free_i32(tmp32_1
);
4341 case 0x59: /* C R1,D2(X2,B2) [RX] */
4342 insn
= ld_code4(s
->pc
);
4343 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4344 tmp2
= tcg_temp_new_i64();
4345 tmp32_1
= tcg_temp_new_i32();
4346 tmp32_2
= load_reg32(r1
);
4347 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4348 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4349 cmp_s32(s
, tmp32_2
, tmp32_1
);
4350 tcg_temp_free_i64(tmp
);
4351 tcg_temp_free_i64(tmp2
);
4352 tcg_temp_free_i32(tmp32_1
);
4353 tcg_temp_free_i32(tmp32_2
);
4355 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4356 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4357 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4358 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4359 insn
= ld_code4(s
->pc
);
4360 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4361 tmp32_1
= load_reg32(r1
);
4362 tmp32_2
= tcg_temp_new_i32();
4363 tmp32_3
= tcg_temp_new_i32();
4364 tcg_gen_qemu_ld32s(tmp
, tmp
, get_mem_index(s
));
4365 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4369 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4373 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4378 store_reg32(r1
, tmp32_3
);
4381 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4384 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4387 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4390 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4395 tcg_temp_free_i64(tmp
);
4396 tcg_temp_free_i32(tmp32_1
);
4397 tcg_temp_free_i32(tmp32_2
);
4398 tcg_temp_free_i32(tmp32_3
);
4400 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4401 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4402 insn
= ld_code4(s
->pc
);
4403 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4404 tmp2
= tcg_temp_new_i64();
4405 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4406 tmp3
= load_reg((r1
+ 1) & 15);
4407 tcg_gen_ext32s_i64(tmp2
, tmp2
);
4408 tcg_gen_ext32s_i64(tmp3
, tmp3
);
4409 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
4410 store_reg32_i64((r1
+ 1) & 15, tmp2
);
4411 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
4412 store_reg32_i64(r1
, tmp2
);
4413 tcg_temp_free_i64(tmp
);
4414 tcg_temp_free_i64(tmp2
);
4415 tcg_temp_free_i64(tmp3
);
4417 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4418 insn
= ld_code4(s
->pc
);
4419 tmp3
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4420 tmp32_1
= load_reg32(r1
);
4421 tmp32_2
= load_reg32(r1
+ 1);
4423 tmp
= tcg_temp_new_i64();
4424 tmp2
= tcg_temp_new_i64();
4426 /* dividend is r(r1 << 32) | r(r1 + 1) */
4427 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4428 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
4429 tcg_gen_shli_i64(tmp
, tmp
, 32);
4430 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
4432 /* divisor is in memory */
4433 tcg_gen_qemu_ld32s(tmp2
, tmp3
, get_mem_index(s
));
4435 /* XXX divisor == 0 -> FixP divide exception */
4437 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
4438 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
4440 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4441 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
4443 store_reg32(r1
, tmp32_1
); /* remainder */
4444 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
4445 tcg_temp_free_i32(tmp32_1
);
4446 tcg_temp_free_i32(tmp32_2
);
4447 tcg_temp_free_i64(tmp
);
4448 tcg_temp_free_i64(tmp2
);
4449 tcg_temp_free_i64(tmp3
);
4451 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4452 insn
= ld_code4(s
->pc
);
4453 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4454 tmp2
= load_freg(r1
);
4455 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4456 tcg_temp_free_i64(tmp
);
4457 tcg_temp_free_i64(tmp2
);
4459 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4460 insn
= ld_code4(s
->pc
);
4461 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4462 tmp2
= tcg_temp_new_i64();
4463 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
4464 store_freg(r1
, tmp2
);
4465 tcg_temp_free_i64(tmp
);
4466 tcg_temp_free_i64(tmp2
);
4468 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4469 insn
= ld_code4(s
->pc
);
4470 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4471 tmp2
= tcg_temp_new_i64();
4472 tmp32_1
= load_freg32(r1
);
4473 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
4474 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4475 tcg_temp_free_i64(tmp
);
4476 tcg_temp_free_i64(tmp2
);
4477 tcg_temp_free_i32(tmp32_1
);
4479 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4480 insn
= ld_code4(s
->pc
);
4481 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4482 tmp2
= tcg_temp_new_i64();
4483 tmp32_1
= load_reg32(r1
);
4484 tmp32_2
= tcg_temp_new_i32();
4485 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4486 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4487 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4488 store_reg32(r1
, tmp32_1
);
4489 tcg_temp_free_i64(tmp
);
4490 tcg_temp_free_i64(tmp2
);
4491 tcg_temp_free_i32(tmp32_1
);
4492 tcg_temp_free_i32(tmp32_2
);
4494 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4495 insn
= ld_code4(s
->pc
);
4496 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4497 tmp2
= tcg_temp_new_i64();
4498 tmp32_1
= tcg_temp_new_i32();
4499 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4500 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4501 store_freg32(r1
, tmp32_1
);
4502 tcg_temp_free_i64(tmp
);
4503 tcg_temp_free_i64(tmp2
);
4504 tcg_temp_free_i32(tmp32_1
);
4506 #ifndef CONFIG_USER_ONLY
4507 case 0x80: /* SSM D2(B2) [S] */
4508 /* Set System Mask */
4509 check_privileged(s
, ilc
);
4510 insn
= ld_code4(s
->pc
);
4511 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4512 tmp
= get_address(s
, 0, b2
, d2
);
4513 tmp2
= tcg_temp_new_i64();
4514 tmp3
= tcg_temp_new_i64();
4515 tcg_gen_andi_i64(tmp3
, psw_mask
, ~0xff00000000000000ULL
);
4516 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4517 tcg_gen_shli_i64(tmp2
, tmp2
, 56);
4518 tcg_gen_or_i64(psw_mask
, tmp3
, tmp2
);
4519 tcg_temp_free_i64(tmp
);
4520 tcg_temp_free_i64(tmp2
);
4521 tcg_temp_free_i64(tmp3
);
4523 case 0x82: /* LPSW D2(B2) [S] */
4525 check_privileged(s
, ilc
);
4526 insn
= ld_code4(s
->pc
);
4527 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4528 tmp
= get_address(s
, 0, b2
, d2
);
4529 tmp2
= tcg_temp_new_i64();
4530 tmp3
= tcg_temp_new_i64();
4531 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4532 tcg_gen_addi_i64(tmp
, tmp
, 4);
4533 tcg_gen_qemu_ld32u(tmp3
, tmp
, get_mem_index(s
));
4534 gen_helper_load_psw(tmp2
, tmp3
);
4535 tcg_temp_free_i64(tmp
);
4536 tcg_temp_free_i64(tmp2
);
4537 tcg_temp_free_i64(tmp3
);
4538 /* we need to keep cc_op intact */
4539 s
->is_jmp
= DISAS_JUMP
;
4541 case 0x83: /* DIAG R1,R3,D2 [RS] */
4542 /* Diagnose call (KVM hypercall) */
4543 check_privileged(s
, ilc
);
4544 potential_page_fault(s
);
4545 insn
= ld_code4(s
->pc
);
4546 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4547 tmp32_1
= tcg_const_i32(insn
& 0xfff);
4550 gen_helper_diag(tmp2
, tmp32_1
, tmp2
, tmp3
);
4552 tcg_temp_free_i32(tmp32_1
);
4553 tcg_temp_free_i64(tmp2
);
4554 tcg_temp_free_i64(tmp3
);
4557 case 0x88: /* SRL R1,D2(B2) [RS] */
4558 case 0x89: /* SLL R1,D2(B2) [RS] */
4559 case 0x8a: /* SRA R1,D2(B2) [RS] */
4560 insn
= ld_code4(s
->pc
);
4561 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4562 tmp
= get_address(s
, 0, b2
, d2
);
4563 tmp32_1
= load_reg32(r1
);
4564 tmp32_2
= tcg_temp_new_i32();
4565 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4566 tcg_gen_andi_i32(tmp32_2
, tmp32_2
, 0x3f);
4569 tcg_gen_shr_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4572 tcg_gen_shl_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4575 tcg_gen_sar_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4576 set_cc_s32(s
, tmp32_1
);
4581 store_reg32(r1
, tmp32_1
);
4582 tcg_temp_free_i64(tmp
);
4583 tcg_temp_free_i32(tmp32_1
);
4584 tcg_temp_free_i32(tmp32_2
);
4586 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4587 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4588 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4589 insn
= ld_code4(s
->pc
);
4590 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4591 tmp
= get_address(s
, 0, b2
, d2
); /* shift */
4592 tmp2
= tcg_temp_new_i64();
4593 tmp32_1
= load_reg32(r1
);
4594 tmp32_2
= load_reg32(r1
+ 1);
4595 tcg_gen_concat_i32_i64(tmp2
, tmp32_2
, tmp32_1
); /* operand */
4598 tcg_gen_shr_i64(tmp2
, tmp2
, tmp
);
4601 tcg_gen_shl_i64(tmp2
, tmp2
, tmp
);
4604 tcg_gen_sar_i64(tmp2
, tmp2
, tmp
);
4605 set_cc_s64(s
, tmp2
);
4608 tcg_gen_shri_i64(tmp
, tmp2
, 32);
4609 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4610 store_reg32(r1
, tmp32_1
);
4611 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4612 store_reg32(r1
+ 1, tmp32_2
);
4613 tcg_temp_free_i64(tmp
);
4614 tcg_temp_free_i64(tmp2
);
4616 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4617 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4618 insn
= ld_code4(s
->pc
);
4619 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4621 tmp
= get_address(s
, 0, b2
, d2
);
4622 tmp2
= tcg_temp_new_i64();
4623 tmp3
= tcg_const_i64(4);
4624 tmp4
= tcg_const_i64(0xffffffff00000000ULL
);
4625 for (i
= r1
;; i
= (i
+ 1) % 16) {
4627 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4628 tcg_gen_and_i64(regs
[i
], regs
[i
], tmp4
);
4629 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
4631 tcg_gen_qemu_st32(regs
[i
], tmp
, get_mem_index(s
));
4636 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
4638 tcg_temp_free_i64(tmp
);
4639 tcg_temp_free_i64(tmp2
);
4640 tcg_temp_free_i64(tmp3
);
4641 tcg_temp_free_i64(tmp4
);
4643 case 0x91: /* TM D1(B1),I2 [SI] */
4644 insn
= ld_code4(s
->pc
);
4645 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4646 tmp2
= tcg_const_i64(i2
);
4647 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
4648 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
4649 tcg_temp_free_i64(tmp
);
4650 tcg_temp_free_i64(tmp2
);
4652 case 0x92: /* MVI D1(B1),I2 [SI] */
4653 insn
= ld_code4(s
->pc
);
4654 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4655 tmp2
= tcg_const_i64(i2
);
4656 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4657 tcg_temp_free_i64(tmp
);
4658 tcg_temp_free_i64(tmp2
);
4660 case 0x94: /* NI D1(B1),I2 [SI] */
4661 case 0x96: /* OI D1(B1),I2 [SI] */
4662 case 0x97: /* XI D1(B1),I2 [SI] */
4663 insn
= ld_code4(s
->pc
);
4664 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4665 tmp2
= tcg_temp_new_i64();
4666 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4669 tcg_gen_andi_i64(tmp2
, tmp2
, i2
);
4672 tcg_gen_ori_i64(tmp2
, tmp2
, i2
);
4675 tcg_gen_xori_i64(tmp2
, tmp2
, i2
);
4680 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4681 set_cc_nz_u64(s
, tmp2
);
4682 tcg_temp_free_i64(tmp
);
4683 tcg_temp_free_i64(tmp2
);
4685 case 0x95: /* CLI D1(B1),I2 [SI] */
4686 insn
= ld_code4(s
->pc
);
4687 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4688 tmp2
= tcg_temp_new_i64();
4689 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4690 cmp_u64c(s
, tmp2
, i2
);
4691 tcg_temp_free_i64(tmp
);
4692 tcg_temp_free_i64(tmp2
);
4694 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4695 insn
= ld_code4(s
->pc
);
4696 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4697 tmp
= get_address(s
, 0, b2
, d2
);
4698 tmp32_1
= tcg_const_i32(r1
);
4699 tmp32_2
= tcg_const_i32(r3
);
4700 potential_page_fault(s
);
4701 gen_helper_lam(tmp32_1
, tmp
, tmp32_2
);
4702 tcg_temp_free_i64(tmp
);
4703 tcg_temp_free_i32(tmp32_1
);
4704 tcg_temp_free_i32(tmp32_2
);
4706 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4707 insn
= ld_code4(s
->pc
);
4708 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4709 tmp
= get_address(s
, 0, b2
, d2
);
4710 tmp32_1
= tcg_const_i32(r1
);
4711 tmp32_2
= tcg_const_i32(r3
);
4712 potential_page_fault(s
);
4713 gen_helper_stam(tmp32_1
, tmp
, tmp32_2
);
4714 tcg_temp_free_i64(tmp
);
4715 tcg_temp_free_i32(tmp32_1
);
4716 tcg_temp_free_i32(tmp32_2
);
4719 insn
= ld_code4(s
->pc
);
4720 r1
= (insn
>> 20) & 0xf;
4721 op
= (insn
>> 16) & 0xf;
4723 disas_a5(s
, op
, r1
, i2
);
4726 insn
= ld_code4(s
->pc
);
4727 r1
= (insn
>> 20) & 0xf;
4728 op
= (insn
>> 16) & 0xf;
4730 disas_a7(s
, op
, r1
, i2
);
4732 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4733 insn
= ld_code4(s
->pc
);
4734 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4735 tmp
= get_address(s
, 0, b2
, d2
);
4736 tmp32_1
= tcg_const_i32(r1
);
4737 tmp32_2
= tcg_const_i32(r3
);
4738 potential_page_fault(s
);
4739 gen_helper_mvcle(cc_op
, tmp32_1
, tmp
, tmp32_2
);
4741 tcg_temp_free_i64(tmp
);
4742 tcg_temp_free_i32(tmp32_1
);
4743 tcg_temp_free_i32(tmp32_2
);
4745 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4746 insn
= ld_code4(s
->pc
);
4747 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4748 tmp
= get_address(s
, 0, b2
, d2
);
4749 tmp32_1
= tcg_const_i32(r1
);
4750 tmp32_2
= tcg_const_i32(r3
);
4751 potential_page_fault(s
);
4752 gen_helper_clcle(cc_op
, tmp32_1
, tmp
, tmp32_2
);
4754 tcg_temp_free_i64(tmp
);
4755 tcg_temp_free_i32(tmp32_1
);
4756 tcg_temp_free_i32(tmp32_2
);
4758 #ifndef CONFIG_USER_ONLY
4759 case 0xac: /* STNSM D1(B1),I2 [SI] */
4760 case 0xad: /* STOSM D1(B1),I2 [SI] */
4761 check_privileged(s
, ilc
);
4762 insn
= ld_code4(s
->pc
);
4763 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4764 tmp2
= tcg_temp_new_i64();
4765 tcg_gen_shri_i64(tmp2
, psw_mask
, 56);
4766 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4768 tcg_gen_andi_i64(psw_mask
, psw_mask
,
4769 ((uint64_t)i2
<< 56) | 0x00ffffffffffffffULL
);
4771 tcg_gen_ori_i64(psw_mask
, psw_mask
, (uint64_t)i2
<< 56);
4773 tcg_temp_free_i64(tmp
);
4774 tcg_temp_free_i64(tmp2
);
4776 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4777 check_privileged(s
, ilc
);
4778 insn
= ld_code4(s
->pc
);
4779 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4780 tmp
= get_address(s
, 0, b2
, d2
);
4781 tmp2
= load_reg(r3
);
4782 tmp32_1
= tcg_const_i32(r1
);
4783 potential_page_fault(s
);
4784 gen_helper_sigp(cc_op
, tmp
, tmp32_1
, tmp2
);
4786 tcg_temp_free_i64(tmp
);
4787 tcg_temp_free_i64(tmp2
);
4788 tcg_temp_free_i32(tmp32_1
);
4790 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4791 check_privileged(s
, ilc
);
4792 insn
= ld_code4(s
->pc
);
4793 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4794 tmp32_1
= tcg_const_i32(r1
);
4795 potential_page_fault(s
);
4796 gen_helper_lra(cc_op
, tmp
, tmp32_1
);
4798 tcg_temp_free_i64(tmp
);
4799 tcg_temp_free_i32(tmp32_1
);
4803 insn
= ld_code4(s
->pc
);
4804 op
= (insn
>> 16) & 0xff;
4806 case 0x9c: /* STFPC D2(B2) [S] */
4808 b2
= (insn
>> 12) & 0xf;
4809 tmp32_1
= tcg_temp_new_i32();
4810 tmp
= tcg_temp_new_i64();
4811 tmp2
= get_address(s
, 0, b2
, d2
);
4812 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUState
, fpc
));
4813 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4814 tcg_gen_qemu_st32(tmp
, tmp2
, get_mem_index(s
));
4815 tcg_temp_free_i32(tmp32_1
);
4816 tcg_temp_free_i64(tmp
);
4817 tcg_temp_free_i64(tmp2
);
4820 disas_b2(s
, op
, insn
);
4825 insn
= ld_code4(s
->pc
);
4826 op
= (insn
>> 16) & 0xff;
4827 r3
= (insn
>> 12) & 0xf; /* aka m3 */
4828 r1
= (insn
>> 4) & 0xf;
4830 disas_b3(s
, op
, r3
, r1
, r2
);
4832 #ifndef CONFIG_USER_ONLY
4833 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4835 check_privileged(s
, ilc
);
4836 insn
= ld_code4(s
->pc
);
4837 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4838 tmp
= get_address(s
, 0, b2
, d2
);
4839 tmp32_1
= tcg_const_i32(r1
);
4840 tmp32_2
= tcg_const_i32(r3
);
4841 potential_page_fault(s
);
4842 gen_helper_stctl(tmp32_1
, tmp
, tmp32_2
);
4843 tcg_temp_free_i64(tmp
);
4844 tcg_temp_free_i32(tmp32_1
);
4845 tcg_temp_free_i32(tmp32_2
);
4847 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4849 check_privileged(s
, ilc
);
4850 insn
= ld_code4(s
->pc
);
4851 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4852 tmp
= get_address(s
, 0, b2
, d2
);
4853 tmp32_1
= tcg_const_i32(r1
);
4854 tmp32_2
= tcg_const_i32(r3
);
4855 potential_page_fault(s
);
4856 gen_helper_lctl(tmp32_1
, tmp
, tmp32_2
);
4857 tcg_temp_free_i64(tmp
);
4858 tcg_temp_free_i32(tmp32_1
);
4859 tcg_temp_free_i32(tmp32_2
);
4863 insn
= ld_code4(s
->pc
);
4864 r1
= (insn
>> 4) & 0xf;
4866 op
= (insn
>> 16) & 0xff;
4867 disas_b9(s
, op
, r1
, r2
);
4869 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4870 insn
= ld_code4(s
->pc
);
4871 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4872 tmp
= get_address(s
, 0, b2
, d2
);
4873 tmp32_1
= tcg_const_i32(r1
);
4874 tmp32_2
= tcg_const_i32(r3
);
4875 potential_page_fault(s
);
4876 gen_helper_cs(cc_op
, tmp32_1
, tmp
, tmp32_2
);
4878 tcg_temp_free_i64(tmp
);
4879 tcg_temp_free_i32(tmp32_1
);
4880 tcg_temp_free_i32(tmp32_2
);
4882 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4883 insn
= ld_code4(s
->pc
);
4884 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4885 tmp
= get_address(s
, 0, b2
, d2
);
4886 tmp32_1
= load_reg32(r1
);
4887 tmp32_2
= tcg_const_i32(r3
);
4888 potential_page_fault(s
);
4889 gen_helper_clm(cc_op
, tmp32_1
, tmp32_2
, tmp
);
4891 tcg_temp_free_i64(tmp
);
4892 tcg_temp_free_i32(tmp32_1
);
4893 tcg_temp_free_i32(tmp32_2
);
4895 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4896 insn
= ld_code4(s
->pc
);
4897 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4898 tmp
= get_address(s
, 0, b2
, d2
);
4899 tmp32_1
= load_reg32(r1
);
4900 tmp32_2
= tcg_const_i32(r3
);
4901 potential_page_fault(s
);
4902 gen_helper_stcm(tmp32_1
, tmp32_2
, tmp
);
4903 tcg_temp_free_i64(tmp
);
4904 tcg_temp_free_i32(tmp32_1
);
4905 tcg_temp_free_i32(tmp32_2
);
4907 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4908 insn
= ld_code4(s
->pc
);
4909 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4911 /* effectively a 32-bit load */
4912 tmp
= get_address(s
, 0, b2
, d2
);
4913 tmp32_1
= tcg_temp_new_i32();
4914 tmp32_2
= tcg_const_i32(r3
);
4915 tcg_gen_qemu_ld32u(tmp
, tmp
, get_mem_index(s
));
4916 store_reg32_i64(r1
, tmp
);
4917 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4918 set_cc_icm(s
, tmp32_2
, tmp32_1
);
4919 tcg_temp_free_i64(tmp
);
4920 tcg_temp_free_i32(tmp32_1
);
4921 tcg_temp_free_i32(tmp32_2
);
4923 uint32_t mask
= 0x00ffffffUL
;
4924 uint32_t shift
= 24;
4926 tmp
= get_address(s
, 0, b2
, d2
);
4927 tmp2
= tcg_temp_new_i64();
4928 tmp32_1
= load_reg32(r1
);
4929 tmp32_2
= tcg_temp_new_i32();
4930 tmp32_3
= tcg_const_i32(r3
);
4931 tmp32_4
= tcg_const_i32(0);
4934 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4935 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4937 tcg_gen_shli_i32(tmp32_2
, tmp32_2
, shift
);
4939 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, mask
);
4940 tcg_gen_or_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4941 tcg_gen_or_i32(tmp32_4
, tmp32_4
, tmp32_2
);
4942 tcg_gen_addi_i64(tmp
, tmp
, 1);
4944 m3
= (m3
<< 1) & 0xf;
4945 mask
= (mask
>> 8) | 0xff000000UL
;
4948 store_reg32(r1
, tmp32_1
);
4949 set_cc_icm(s
, tmp32_3
, tmp32_4
);
4950 tcg_temp_free_i64(tmp
);
4951 tcg_temp_free_i64(tmp2
);
4952 tcg_temp_free_i32(tmp32_1
);
4953 tcg_temp_free_i32(tmp32_2
);
4954 tcg_temp_free_i32(tmp32_3
);
4955 tcg_temp_free_i32(tmp32_4
);
4957 /* i.e. env->cc = 0 */
4958 gen_op_movi_cc(s
, 0);
4963 insn
= ld_code6(s
->pc
);
4964 r1
= (insn
>> 36) & 0xf;
4965 op
= (insn
>> 32) & 0xf;
4969 disas_c0(s
, op
, r1
, i2
);
4972 disas_c2(s
, op
, r1
, i2
);
4978 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4979 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4980 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4981 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4982 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4983 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4984 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4985 insn
= ld_code6(s
->pc
);
4986 vl
= tcg_const_i32((insn
>> 32) & 0xff);
4987 b1
= (insn
>> 28) & 0xf;
4988 b2
= (insn
>> 12) & 0xf;
4989 d1
= (insn
>> 16) & 0xfff;
4991 tmp
= get_address(s
, 0, b1
, d1
);
4992 tmp2
= get_address(s
, 0, b2
, d2
);
4995 gen_op_mvc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
4998 potential_page_fault(s
);
4999 gen_helper_nc(cc_op
, vl
, tmp
, tmp2
);
5003 gen_op_clc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
5006 potential_page_fault(s
);
5007 gen_helper_oc(cc_op
, vl
, tmp
, tmp2
);
5011 potential_page_fault(s
);
5012 gen_helper_xc(cc_op
, vl
, tmp
, tmp2
);
5016 potential_page_fault(s
);
5017 gen_helper_tr(vl
, tmp
, tmp2
);
5021 potential_page_fault(s
);
5022 gen_helper_unpk(vl
, tmp
, tmp2
);
5027 tcg_temp_free_i64(tmp
);
5028 tcg_temp_free_i64(tmp2
);
5030 #ifndef CONFIG_USER_ONLY
5031 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5032 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5033 check_privileged(s
, ilc
);
5034 potential_page_fault(s
);
5035 insn
= ld_code6(s
->pc
);
5036 r1
= (insn
>> 36) & 0xf;
5037 r3
= (insn
>> 32) & 0xf;
5038 b1
= (insn
>> 28) & 0xf;
5039 d1
= (insn
>> 16) & 0xfff;
5040 b2
= (insn
>> 12) & 0xf;
5044 tmp2
= get_address(s
, 0, b1
, d1
);
5045 tmp3
= get_address(s
, 0, b2
, d2
);
5047 gen_helper_mvcp(cc_op
, tmp
, tmp2
, tmp3
);
5049 gen_helper_mvcs(cc_op
, tmp
, tmp2
, tmp3
);
5052 tcg_temp_free_i64(tmp
);
5053 tcg_temp_free_i64(tmp2
);
5054 tcg_temp_free_i64(tmp3
);
5058 insn
= ld_code6(s
->pc
);
5061 r1
= (insn
>> 36) & 0xf;
5062 x2
= (insn
>> 32) & 0xf;
5063 b2
= (insn
>> 28) & 0xf;
5064 d2
= ((int)((((insn
>> 16) & 0xfff)
5065 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
5066 disas_e3(s
, op
, r1
, x2
, b2
, d2
);
5068 #ifndef CONFIG_USER_ONLY
5070 /* Test Protection */
5071 check_privileged(s
, ilc
);
5072 insn
= ld_code6(s
->pc
);
5078 insn
= ld_code6(s
->pc
);
5081 r1
= (insn
>> 36) & 0xf;
5082 r3
= (insn
>> 32) & 0xf;
5083 b2
= (insn
>> 28) & 0xf;
5084 d2
= ((int)((((insn
>> 16) & 0xfff)
5085 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
5086 disas_eb(s
, op
, r1
, r3
, b2
, d2
);
5089 insn
= ld_code6(s
->pc
);
5092 r1
= (insn
>> 36) & 0xf;
5093 x2
= (insn
>> 32) & 0xf;
5094 b2
= (insn
>> 28) & 0xf;
5095 d2
= (short)((insn
>> 16) & 0xfff);
5096 r1b
= (insn
>> 12) & 0xf;
5097 disas_ed(s
, op
, r1
, x2
, b2
, d2
, r1b
);
5100 LOG_DISAS("unimplemented opcode 0x%x\n", opc
);
5101 gen_illegal_opcode(s
, ilc
);
5105 /* Instruction length is encoded in the opcode */
/*
 * Core translation loop: decode guest s390x instructions starting at the
 * TB's entry PC and emit TCG ops until a block-ending condition is hit.
 *
 * NOTE(review): this chunk is a corrupted listing.  Single C statements are
 * split across several physical lines, and the gaps in the embedded line
 * numbering (e.g. 5111-5113, 5119-5123, 5141-5145, 5149-5154, 5220-5224)
 * show that many original lines -- including variable declarations, several
 * statements and the function's closing braces -- were dropped by the
 * extraction.  Only comments are added below; the visible text is left
 * byte-for-byte untouched.  Recover the full body from the upstream QEMU
 * file before editing any logic here.
 */
5109 static inline void gen_intermediate_code_internal(CPUState
*env
,
5110 TranslationBlock
*tb
,
/* Third parameter (elided, original line 5111) is presumably the
 * search_pc flag: gen_intermediate_code below passes 0 and
 * gen_intermediate_code_pc passes 1 -- TODO confirm against upstream. */
/* Locals for the translation loop (declarations of dc, bp, j, lj are
 * among the elided lines). */
5114 target_ulong pc_start
;
5115 uint64_t next_page_start
;
5116 uint16_t *gen_opc_end
;
5118 int num_insns
, max_insns
;
/* In 31-bit (non-64-bit) mode the instruction address wraps at 2^31,
 * matching pc_to_link_info() in the file header. */
5124 if (!(tb
->flags
& FLAG_MASK_64
)) {
5125 pc_start
&= 0x7fffffff;
/* Initialise the DisasContext: fall through to the next insn by default,
 * condition code computed dynamically until proven otherwise. */
5129 dc
.is_jmp
= DISAS_NEXT
;
5131 dc
.cc_op
= CC_OP_DYNAMIC
;
/* End of the TCG opcode buffer -- translation must stop before it. */
5133 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
/* Never translate across a guest page boundary, so page protection
 * changes take effect at TB granularity. */
5135 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
/* icount budget for this TB; 0 means "no limit requested". */
5138 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5139 if (max_insns
== 0) {
5140 max_insns
= CF_COUNT_MASK
;
/* Per-instruction loop (the opening of the do-loop is elided; its
 * closing "while" condition is visible further down at 5180/5181). */
/* Stop translation on an attached debugger breakpoint at the current PC
 * (the breakpoint-hit handling between 5148 and 5155 is elided). */
5146 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5147 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5148 if (bp
->pc
== dc
.pc
) {
5155 j
= gen_opc_ptr
- gen_opc_buf
;
5159 gen_opc_instr_start
[lj
++] = 0;
/* Record per-op metadata (guest PC, cc_op, icount) so that
 * restore_state_to_opc() can rebuild guest state after a fault.
 * Presumably only done when the search_pc flag is set -- the guarding
 * condition is among the elided lines; TODO confirm. */
5162 gen_opc_pc
[lj
] = dc
.pc
;
5163 gen_opc_cc_op
[lj
] = dc
.cc_op
;
5164 gen_opc_instr_start
[lj
] = 1;
5165 gen_opc_icount
[lj
] = num_insns
;
/* Last instruction of an I/O-ending TB: the body (elided, presumably
 * gen_io_start() -- TODO confirm) must run before translating it. */
5167 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
5170 #if defined(S390X_DEBUG_DISAS_VERBOSE)
5171 LOG_DISAS("pc " TARGET_FMT_lx
"\n",
/* Translate exactly one guest instruction at dc.pc. */
5174 disas_s390_insn(&dc
);
/* Single-stepping: body elided (presumably forces the loop to end after
 * one instruction -- TODO confirm). */
5177 if (env
->singlestep_enabled
) {
/* Loop until: the insn ended the block, the op buffer is nearly full,
 * the next guest page is reached, the icount budget is spent, or
 * single-stepping is active (trailing condition lines elided). */
5180 } while (!dc
.is_jmp
&& gen_opc_ptr
< gen_opc_end
&& dc
.pc
< next_page_start
5181 && num_insns
< max_insns
&& !env
->singlestep_enabled
/* Epilogue: write the final PSW address back for blocks that did not
 * end in an explicit jump (guarding condition elided). */
5185 update_psw_addr(&dc
);
/* Under -singlestep, materialise a still-dynamic condition code now. */
5188 if (singlestep
&& dc
.cc_op
!= CC_OP_DYNAMIC
) {
5189 gen_op_calc_cc(&dc
);
5191 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5193 gen_op_set_cc_op(&dc
);
/* CF_LAST_IO epilogue (body elided, presumably gen_io_end()). */
5196 if (tb
->cflags
& CF_LAST_IO
) {
5199 /* Generate the return instruction */
5200 if (dc
.is_jmp
!= DISAS_TB_JUMP
) {
5203 gen_icount_end(tb
, num_insns
);
5204 *gen_opc_ptr
= INDEX_op_end
;
/* search_pc mode: pad gen_opc_instr_start out to the last emitted op
 * (guarding condition and loop structure elided). */
5206 j
= gen_opc_ptr
- gen_opc_buf
;
5209 gen_opc_instr_start
[lj
++] = 0;
/* Record the translated extent and instruction count on the TB. */
5212 tb
->size
= dc
.pc
- pc_start
;
5213 tb
->icount
= num_insns
;
/* Optional disassembly/CPU-state logging, compiled in via
 * S390X_DEBUG_DISAS (defined in the file header). */
5215 #if defined(S390X_DEBUG_DISAS)
5216 log_cpu_state_mask(CPU_LOG_TB_CPU
, env
, 0);
5217 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5218 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5219 log_target_disas(pc_start
, dc
.pc
- pc_start
, 1);
5225 void gen_intermediate_code (CPUState
*env
, struct TranslationBlock
*tb
)
5227 gen_intermediate_code_internal(env
, tb
, 0);
5230 void gen_intermediate_code_pc (CPUState
*env
, struct TranslationBlock
*tb
)
5232 gen_intermediate_code_internal(env
, tb
, 1);
5235 void restore_state_to_opc(CPUState
*env
, TranslationBlock
*tb
, int pc_pos
)
5238 env
->psw
.addr
= gen_opc_pc
[pc_pos
];
5239 cc_op
= gen_opc_cc_op
[pc_pos
];
5240 if ((cc_op
!= CC_OP_DYNAMIC
) && (cc_op
!= CC_OP_STATIC
)) {