target-s390x: switch to AREG0 free mode
[qemu/ar7.git] / target-s390x / translate.c
blob66119cd122706699dc23c896d2ebea29e2a06bbe
1 /*
2 * S/390 translation
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
/* Compile-time debug switches for the s390x translator.  */
/* #define DEBUG_ILLEGAL_INSTRUCTIONS */
/* #define DEBUG_INLINE_BRANCHES */
#define S390X_DEBUG_DISAS
/* #define S390X_DEBUG_DISAS_VERBOSE */

/* LOG_DISAS() logs via qemu_log() only when verbose disas is enabled;
   otherwise it compiles away to nothing.  */
#ifdef S390X_DEBUG_DISAS_VERBOSE
# define LOG_DISAS(...) qemu_log(__VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
#include "cpu.h"
#include "disas.h"
#include "tcg-op.h"
#include "qemu-log.h"

/* global register indexes */
static TCGv_ptr cpu_env;

#include "gen-icount.h"
#include "helper.h"
/* Include helper.h a second time with GEN_HELPER defined so the
   gen_helper_*() call wrappers get generated.  */
#define GEN_HELPER 1
#include "helper.h"
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;                 /* guest address of the insn being translated */
    int is_jmp;                  /* DISAS_* code describing how the TB ends */
    enum cc_op cc_op;            /* how the condition code is currently computed */
    struct TranslationBlock *tb; /* the TB under translation */
};

/* is_jmp value: TB ends by raising an exception */
#define DISAS_EXCP 4
static void gen_op_calc_cc(DisasContext *s);

#ifdef DEBUG_INLINE_BRANCHES
/* Per-cc_op counters: branches translated inline vs. via the cc helper.  */
static uint64_t inline_branch_hit[CC_OP_MAX];
static uint64_t inline_branch_miss[CC_OP_MAX];
#endif
/* Trace a raw instruction word (no-op unless verbose disas is enabled).  */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
67 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
69 if (!(s->tb->flags & FLAG_MASK_64)) {
70 if (s->tb->flags & FLAG_MASK_32) {
71 return pc | 0x80000000;
74 return pc;
77 void cpu_dump_state(CPUS390XState *env, FILE *f, fprintf_function cpu_fprintf,
78 int flags)
80 int i;
82 for (i = 0; i < 16; i++) {
83 cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
84 if ((i % 4) == 3) {
85 cpu_fprintf(f, "\n");
86 } else {
87 cpu_fprintf(f, " ");
91 for (i = 0; i < 16; i++) {
92 cpu_fprintf(f, "F%02d=%016" PRIx64, i, *(uint64_t *)&env->fregs[i]);
93 if ((i % 4) == 3) {
94 cpu_fprintf(f, "\n");
95 } else {
96 cpu_fprintf(f, " ");
100 cpu_fprintf(f, "\n");
102 #ifndef CONFIG_USER_ONLY
103 for (i = 0; i < 16; i++) {
104 cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
105 if ((i % 4) == 3) {
106 cpu_fprintf(f, "\n");
107 } else {
108 cpu_fprintf(f, " ");
111 #endif
113 cpu_fprintf(f, "\n");
115 if (env->cc_op > 3) {
116 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
117 env->psw.mask, env->psw.addr, cc_name(env->cc_op));
118 } else {
119 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
120 env->psw.mask, env->psw.addr, env->cc_op);
123 #ifdef DEBUG_INLINE_BRANCHES
124 for (i = 0; i < CC_OP_MAX; i++) {
125 cpu_fprintf(f, " %15s = %10ld\t%10ld\n", cc_name(i),
126 inline_branch_miss[i], inline_branch_hit[i]);
128 #endif
/* TCG globals mapping guest PSW and cc computation state onto env.  */
static TCGv_i64 psw_addr;
static TCGv_i64 psw_mask;

static TCGv_i32 cc_op;
static TCGv_i64 cc_src;
static TCGv_i64 cc_dst;
static TCGv_i64 cc_vr;

/* Backing storage for the "r0".."r15" names registered with TCG:
   ten 3-byte names ("r0\0".."r9\0") plus six 4-byte names ("r10\0"..).  */
static char cpu_reg_names[10*3 + 6*4];
static TCGv_i64 regs[16];

/* cc_op value recorded per generated op, for state restore at a PC.  */
static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
144 void s390x_translate_init(void)
146 int i;
147 size_t cpu_reg_names_size = sizeof(cpu_reg_names);
148 char *p;
150 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
151 psw_addr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, psw.addr),
152 "psw_addr");
153 psw_mask = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, psw.mask),
154 "psw_mask");
156 cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUS390XState, cc_op),
157 "cc_op");
158 cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_src),
159 "cc_src");
160 cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_dst),
161 "cc_dst");
162 cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_vr),
163 "cc_vr");
165 p = cpu_reg_names;
166 for (i = 0; i < 16; i++) {
167 snprintf(p, cpu_reg_names_size, "r%d", i);
168 regs[i] = tcg_global_mem_new(TCG_AREG0,
169 offsetof(CPUS390XState, regs[i]), p);
170 p += (i < 10) ? 3 : 4;
171 cpu_reg_names_size -= (i < 10) ? 3 : 4;
/* Return a fresh temp holding the full 64-bit value of GPR @reg.
   Caller owns (and frees) the returned temp.  */
static inline TCGv_i64 load_reg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, regs[reg]);
    return r;
}

/* Return a fresh temp holding the 64-bit image of FPR @reg.  */
static inline TCGv_i64 load_freg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ld_i64(r, cpu_env, offsetof(CPUS390XState, fregs[reg].d));
    return r;
}

/* Return a fresh temp holding the high (short-float) half of FPR @reg.  */
static inline TCGv_i32 load_freg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_ld_i32(r, cpu_env, offsetof(CPUS390XState, fregs[reg].l.upper));
    return r;
}

/* Return a fresh temp holding the low 32 bits of GPR @reg.  */
static inline TCGv_i32 load_reg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(r, regs[reg]);
    return r;
}

/* Return a fresh i64 temp holding the low 32 bits of GPR @reg,
   sign-extended to 64 bits.  */
static inline TCGv_i64 load_reg32_i64(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(r, regs[reg]);
    return r;
}

/* Store the full 64-bit value @v into GPR @reg.  */
static inline void store_reg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(regs[reg], v);
}

/* Store the 64-bit image @v into FPR @reg.  */
static inline void store_freg(int reg, TCGv_i64 v)
{
    tcg_gen_st_i64(v, cpu_env, offsetof(CPUS390XState, fregs[reg].d));
}

/* Store @v into the low 32 bits of GPR @reg, preserving the high half.  */
static inline void store_reg32(int reg, TCGv_i32 v)
{
#if HOST_LONG_BITS == 32
    /* On a 32-bit host an i64 global is a register pair; write the low
       half directly.  */
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(tmp, v);
    /* 32 bit register writes keep the upper half */
    tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 32);
    tcg_temp_free_i64(tmp);
#endif
}

/* Store the low 32 bits of the i64 @v into GPR @reg, preserving the
   high half of the register.  */
static inline void store_reg32_i64(int reg, TCGv_i64 v)
{
    /* 32 bit register writes keep the upper half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), TCGV_LOW(v));
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
#endif
}

/* Store the low 16 bits of @v into GPR @reg, preserving the rest.  */
static inline void store_reg16(int reg, TCGv_i32 v)
{
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(tmp, v);
    /* 16 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 16);
    tcg_temp_free_i64(tmp);
}

/* Store the low 8 bits of @v into GPR @reg, preserving the rest.  */
static inline void store_reg8(int reg, TCGv_i64 v)
{
    /* 8 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
}

/* Store @v into the high (short-float) half of FPR @reg.  */
static inline void store_freg32(int reg, TCGv_i32 v)
{
    tcg_gen_st_i32(v, cpu_env, offsetof(CPUS390XState, fregs[reg].l.upper));
}
/* Synchronize the architectural PSW address with the translator's pc.  */
static inline void update_psw_addr(DisasContext *s)
{
    /* psw.addr */
    tcg_gen_movi_i64(psw_addr, s->pc);
}

/* Call before generating a memory access that may fault in system mode:
   make psw.addr and the cc architecturally up-to-date so the fault
   handler sees consistent state.  No-op for user-only builds.  */
static inline void potential_page_fault(DisasContext *s)
{
#ifndef CONFIG_USER_ONLY
    update_psw_addr(s);
    gen_op_calc_cc(s);
#endif
}
/* Fetch a 2-byte instruction word from guest code at @pc.  */
static inline uint64_t ld_code2(uint64_t pc)
{
    return (uint64_t)cpu_lduw_code(cpu_single_env, pc);
}

/* Fetch a 4-byte instruction word from guest code at @pc.  */
static inline uint64_t ld_code4(uint64_t pc)
{
    return (uint64_t)cpu_ldl_code(cpu_single_env, pc);
}

/* Fetch a 6-byte instruction: the first halfword ends up in bits 32..47,
   the remaining 4 bytes in bits 0..31.  */
static inline uint64_t ld_code6(uint64_t pc)
{
    uint64_t opc;
    opc = (uint64_t)cpu_lduw_code(cpu_single_env, pc) << 32;
    opc |= (uint64_t)(uint32_t)cpu_ldl_code(cpu_single_env, pc + 2);
    return opc;
}
/* Map the PSW address-space-control bits (cached in tb->flags) to the
   MMU index used for qemu_ld/st ops.  */
static inline int get_mem_index(DisasContext *s)
{
    switch (s->tb->flags & FLAG_MASK_ASC) {
    case PSW_ASC_PRIMARY >> 32:
        return 0;
    case PSW_ASC_SECONDARY >> 32:
        return 1;
    case PSW_ASC_HOME >> 32:
        return 2;
    default:
        /* any other ASC value is invalid at translation time */
        tcg_abort();
        break;
    }
}
/* Raise EXCP_DEBUG (breakpoint hit): sync PSW and cc, then call the
   exception helper and mark the TB as ended by exception.  */
static inline void gen_debug(DisasContext *s)
{
    TCGv_i32 tmp = tcg_const_i32(EXCP_DEBUG);
    update_psw_addr(s);
    gen_op_calc_cc(s);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
    s->is_jmp = DISAS_EXCP;
}
#ifdef CONFIG_USER_ONLY

/* User-only build: an illegal opcode simply raises EXCP_SPEC.
   @ilc (instruction length code) is unused here; the system-mode
   variant below needs it for PSW advancement.  */
static void gen_illegal_opcode(DisasContext *s, int ilc)
{
    TCGv_i32 tmp = tcg_const_i32(EXCP_SPEC);
    update_psw_addr(s);
    gen_op_calc_cc(s);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
    s->is_jmp = DISAS_EXCP;
}
#else /* CONFIG_USER_ONLY */

/* Debug aid: re-fetch and print the offending instruction.  @ilc is the
   instruction length code (1/2/3 -> 2/4/6 bytes).  Compiles to nothing
   unless DEBUG_ILLEGAL_INSTRUCTIONS is defined.  */
static void debug_print_inst(DisasContext *s, int ilc)
{
#ifdef DEBUG_ILLEGAL_INSTRUCTIONS
    uint64_t inst = 0;

    switch (ilc & 3) {
    case 1:
        inst = ld_code2(s->pc);
        break;
    case 2:
        inst = ld_code4(s->pc);
        break;
    case 3:
        inst = ld_code6(s->pc);
        break;
    }

    fprintf(stderr, "Illegal instruction [%d at %016" PRIx64 "]: 0x%016"
            PRIx64 "\n", ilc, s->pc, inst);
#endif
}
/* Raise a program exception with code @code.  Stores the code and the
   instruction length code into env, advances the PSW past the insn,
   saves the cc, then triggers EXCP_PGM and ends the TB.  */
static void gen_program_exception(DisasContext *s, int ilc, int code)
{
    TCGv_i32 tmp;

    debug_print_inst(s, ilc);

    /* remember what pgm exeption this was */
    tmp = tcg_const_i32(code);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_code));
    tcg_temp_free_i32(tmp);

    tmp = tcg_const_i32(ilc);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_ilc));
    tcg_temp_free_i32(tmp);

    /* advance past instruction */
    s->pc += (ilc * 2);
    update_psw_addr(s);

    /* save off cc */
    gen_op_calc_cc(s);

    /* trigger exception */
    tmp = tcg_const_i32(EXCP_PGM);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);

    /* end TB here */
    s->is_jmp = DISAS_EXCP;
}
/* System mode: illegal opcode -> specification program exception.  */
static void gen_illegal_opcode(DisasContext *s, int ilc)
{
    gen_program_exception(s, ilc, PGM_SPECIFICATION);
}

/* Privileged-operation program exception.  */
static void gen_privileged_exception(DisasContext *s, int ilc)
{
    gen_program_exception(s, ilc, PGM_PRIVILEGED);
}

/* Raise a privileged-operation exception if the PSW says we are in
   problem (user) state.  */
static void check_privileged(DisasContext *s, int ilc)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_privileged_exception(s, ilc);
    }
}

#endif /* CONFIG_USER_ONLY */
407 static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
409 TCGv_i64 tmp;
411 /* 31-bitify the immediate part; register contents are dealt with below */
412 if (!(s->tb->flags & FLAG_MASK_64)) {
413 d2 &= 0x7fffffffUL;
416 if (x2) {
417 if (d2) {
418 tmp = tcg_const_i64(d2);
419 tcg_gen_add_i64(tmp, tmp, regs[x2]);
420 } else {
421 tmp = load_reg(x2);
423 if (b2) {
424 tcg_gen_add_i64(tmp, tmp, regs[b2]);
426 } else if (b2) {
427 if (d2) {
428 tmp = tcg_const_i64(d2);
429 tcg_gen_add_i64(tmp, tmp, regs[b2]);
430 } else {
431 tmp = load_reg(b2);
433 } else {
434 tmp = tcg_const_i64(d2);
437 /* 31-bit mode mask if there are values loaded from registers */
438 if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
439 tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
442 return tmp;
/* Record a constant cc value (0..3) without emitting any ops.  */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}

/* One-operand cc update: only cc_dst is live, cc_src/cc_vr discarded.  */
static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Same as above with a 32-bit operand (zero-extended into cc_dst).  */
static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Two-operand cc update: cc_src and cc_dst live, cc_vr discarded.  */
static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Two-operand cc update, 32-bit variant.  */
static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Three-operand cc update: src, dst and result (vr) all live.  */
static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst, TCGv_i64 vr)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_mov_i64(cc_vr, vr);
    s->cc_op = op;
}

/* Three-operand cc update, 32-bit variant.  */
static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst, TCGv_i32 vr)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_extu_i32_i64(cc_vr, vr);
    s->cc_op = op;
}
/* Convenience wrappers: each sets up the deferred cc computation for a
   particular cc_op.  The cc itself is only materialized on demand by
   gen_op_calc_cc().  */

static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}

static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}

static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i32(s, cond, v1, v2);
}

static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i64(s, cond, v1, v2);
}

/* signed 32-bit compare */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}

/* unsigned 32-bit compare */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}

/* signed 32-bit compare against a constant */
static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
{
    /* XXX optimize for the constant? put it in s? */
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTGT_32);
    tcg_temp_free_i32(tmp);
}

/* unsigned 32-bit compare against a constant */
static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
{
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
    tcg_temp_free_i32(tmp);
}

/* signed 64-bit compare */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}

/* unsigned 64-bit compare */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}

/* signed 64-bit compare against a constant */
static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_s64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* unsigned 64-bit compare against a constant */
static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_u64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* cc from the sign/zero of a 32-bit result */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}

/* cc from the sign/zero of a 64-bit result */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}

static void set_cc_add64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_64, v1, v2, vr);
}

static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
}

static void set_cc_sub64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_64, v1, v2, vr);
}

static void set_cc_subu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_64, v1, v2, vr);
}

static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}

static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}

static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
}

static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
}

static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
}

static void set_cc_subu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUBU_32, v1, v2, vr);
}

static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}

static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}

static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}

static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}

/* cc for INSERT CHARACTERS UNDER MASK style results */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}

/* float32 vs. i64 compare: mixed widths, so set cc fields directly */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}

static void gen_set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}
/* CC value is in env->cc_op */
static inline void set_cc_static(DisasContext *s)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}

/* Write the tracked cc_op into the cc_op global, unless it is already
   dynamic/static (in which case the global is already authoritative).  */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}

static inline void gen_update_cc_op(DisasContext *s)
{
    gen_op_set_cc_op(s);
}
696 /* calculates cc into cc_op */
697 static void gen_op_calc_cc(DisasContext *s)
699 TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
700 TCGv_i64 dummy = tcg_const_i64(0);
702 switch (s->cc_op) {
703 case CC_OP_CONST0:
704 case CC_OP_CONST1:
705 case CC_OP_CONST2:
706 case CC_OP_CONST3:
707 /* s->cc_op is the cc value */
708 tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
709 break;
710 case CC_OP_STATIC:
711 /* env->cc_op already is the cc value */
712 break;
713 case CC_OP_NZ:
714 case CC_OP_ABS_64:
715 case CC_OP_NABS_64:
716 case CC_OP_ABS_32:
717 case CC_OP_NABS_32:
718 case CC_OP_LTGT0_32:
719 case CC_OP_LTGT0_64:
720 case CC_OP_COMP_32:
721 case CC_OP_COMP_64:
722 case CC_OP_NZ_F32:
723 case CC_OP_NZ_F64:
724 /* 1 argument */
725 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, dummy, cc_dst, dummy);
726 break;
727 case CC_OP_ICM:
728 case CC_OP_LTGT_32:
729 case CC_OP_LTGT_64:
730 case CC_OP_LTUGTU_32:
731 case CC_OP_LTUGTU_64:
732 case CC_OP_TM_32:
733 case CC_OP_TM_64:
734 case CC_OP_LTGT_F32:
735 case CC_OP_LTGT_F64:
736 case CC_OP_SLAG:
737 /* 2 arguments */
738 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, dummy);
739 break;
740 case CC_OP_ADD_64:
741 case CC_OP_ADDU_64:
742 case CC_OP_SUB_64:
743 case CC_OP_SUBU_64:
744 case CC_OP_ADD_32:
745 case CC_OP_ADDU_32:
746 case CC_OP_SUB_32:
747 case CC_OP_SUBU_32:
748 /* 3 arguments */
749 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, cc_vr);
750 break;
751 case CC_OP_DYNAMIC:
752 /* unknown operation - assume 3 arguments and cc_op in env */
753 gen_helper_calc_cc(cc_op, cpu_env, cc_op, cc_src, cc_dst, cc_vr);
754 break;
755 default:
756 tcg_abort();
759 tcg_temp_free_i32(local_cc_op);
761 /* We now have cc in cc_op as constant */
762 set_cc_static(s);
/* Decode an RR-format instruction: two 4-bit register fields.  */
static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
{
    debug_insn(insn);

    *r1 = (insn >> 4) & 0xf;
    *r2 = insn & 0xf;
}

/* Decode an RX-format instruction (r1, x2, b2, d2) and return the
   computed effective address as a fresh temp.  */
static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
                                 int *x2, int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    *x2 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;

    return get_address(s, *x2, *b2, *d2);
}

/* Decode an RS-format instruction (r1, r3/m3, b2, d2).  */
static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
                             int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    /* aka m3 */
    *r3 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;
}

/* Decode an SI-format instruction (i2 immediate, b1, d1) and return the
   computed effective address as a fresh temp.  */
static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
                                 int *b1, int *d1)
{
    debug_insn(insn);

    *i2 = (insn >> 16) & 0xff;
    *b1 = (insn >> 12) & 0xf;
    *d1 = insn & 0xfff;

    return get_address(s, 0, *b1, *d1);
}
/* End the TB with a jump to guest address @pc, chaining TBs directly
   when the target lies on the same guest page.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
{
    TranslationBlock *tb;

    gen_update_cc_op(s);

    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb(0);
    }
}
/* Statistics hooks (active only with DEBUG_INLINE_BRANCHES): count how
   often a conditional branch could / could not be translated inline.  */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}

static inline void account_inline_branch(DisasContext *s)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[s->cc_op]++;
#endif
}
/* Emit a conditional branch to label @skip that is taken when the
   4-bit condition @mask does NOT match the current cc (i.e. @skip is
   the "branch not taken" path; the code following the call is the
   "taken" path).  When the tracked cc_op allows it, the test is folded
   into a single inline compare; otherwise the cc is materialized via
   gen_op_calc_cc() and tested as a 0..3 value.  */
static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
{
    TCGv_i32 tmp, tmp2, r;
    TCGv_i64 tmp64;
    int old_cc_op;

    switch (s->cc_op) {
    case CC_OP_LTGT0_32:
        /* cc derived from the sign of a 32-bit result in cc_dst */
        tmp = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* dst <= 0 */
            tcg_gen_brcondi_i32(TCG_COND_GT, tmp, 0, skip);
            break;
        case 0x8 | 0x2: /* dst >= 0 */
            tcg_gen_brcondi_i32(TCG_COND_LT, tmp, 0, skip);
            break;
        case 0x8: /* dst == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            break;
        case 0x7: /* dst != 0 */
        case 0x6: /* dst != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            break;
        case 0x4: /* dst < 0 */
            tcg_gen_brcondi_i32(TCG_COND_GE, tmp, 0, skip);
            break;
        case 0x2: /* dst > 0 */
            tcg_gen_brcondi_i32(TCG_COND_LE, tmp, 0, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        break;
    case CC_OP_LTGT0_64:
        /* cc derived from the sign of a 64-bit result in cc_dst */
        switch (mask) {
        case 0x8 | 0x4: /* dst <= 0 */
            tcg_gen_brcondi_i64(TCG_COND_GT, cc_dst, 0, skip);
            break;
        case 0x8 | 0x2: /* dst >= 0 */
            tcg_gen_brcondi_i64(TCG_COND_LT, cc_dst, 0, skip);
            break;
        case 0x8: /* dst == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        case 0x7: /* dst != 0 */
        case 0x6: /* dst != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        case 0x4: /* dst < 0 */
            tcg_gen_brcondi_i64(TCG_COND_GE, cc_dst, 0, skip);
            break;
        case 0x2: /* dst > 0 */
            tcg_gen_brcondi_i64(TCG_COND_LE, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_LTGT_32:
        /* signed 32-bit compare of cc_src against cc_dst */
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i32(TCG_COND_GT, tmp, tmp2, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i32(TCG_COND_LT, tmp, tmp2, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i32(TCG_COND_GE, tmp, tmp2, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i32(TCG_COND_LE, tmp, tmp2, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        break;
    case CC_OP_LTGT_64:
        /* signed 64-bit compare of cc_src against cc_dst */
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i64(TCG_COND_GT, cc_src, cc_dst, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i64(TCG_COND_LT, cc_src, cc_dst, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i64(TCG_COND_GE, cc_src, cc_dst, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i64(TCG_COND_LE, cc_src, cc_dst, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_LTUGTU_32:
        /* unsigned 32-bit compare of cc_src against cc_dst */
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i32(TCG_COND_GTU, tmp, tmp2, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i32(TCG_COND_LTU, tmp, tmp2, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i32(TCG_COND_GEU, tmp, tmp2, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i32(TCG_COND_LEU, tmp, tmp2, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        break;
    case CC_OP_LTUGTU_64:
        /* unsigned 64-bit compare of cc_src against cc_dst */
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i64(TCG_COND_GTU, cc_src, cc_dst, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i64(TCG_COND_LTU, cc_src, cc_dst, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i64(TCG_COND_GEU, cc_src, cc_dst, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i64(TCG_COND_LEU, cc_src, cc_dst, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_NZ:
        /* cc is only 0 (zero) or 1 (non-zero), from cc_dst */
        switch (mask) {
        /* dst == 0 || dst != 0 */
        case 0x8 | 0x4:
        case 0x8 | 0x4 | 0x2:
        case 0x8 | 0x4 | 0x2 | 0x1:
        case 0x8 | 0x4 | 0x1:
            /* always true: never branch to skip */
            break;
        /* dst == 0 */
        case 0x8:
        case 0x8 | 0x2:
        case 0x8 | 0x2 | 0x1:
        case 0x8 | 0x1:
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        /* dst != 0 */
        case 0x4:
        case 0x4 | 0x2:
        case 0x4 | 0x2 | 0x1:
        case 0x4 | 0x1:
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_TM_32:
        /* test-under-mask, 32-bit: cc_src & cc_dst */
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();

        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        tcg_gen_and_i32(tmp, tmp, tmp2);
        switch (mask) {
        case 0x8: /* val & mask == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        account_inline_branch(s);
        break;
    case CC_OP_TM_64:
        /* test-under-mask, 64-bit: cc_src & cc_dst */
        tmp64 = tcg_temp_new_i64();

        tcg_gen_and_i64(tmp64, cc_src, cc_dst);
        switch (mask) {
        case 0x8: /* val & mask == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, tmp64, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, tmp64, 0, skip);
            break;
        default:
            tcg_temp_free_i64(tmp64);
            goto do_dynamic;
        }
        tcg_temp_free_i64(tmp64);
        account_inline_branch(s);
        break;
    case CC_OP_ICM:
        /* insert-characters-under-mask: only zero/non-zero inlinable */
        switch (mask) {
        case 0x8: /* val == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val != 0 */
        case 0x4 | 0x2: /* val != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_STATIC:
        /* cc is already materialized in the cc_op global */
        old_cc_op = s->cc_op;
        goto do_dynamic_nocccalc;
    case CC_OP_DYNAMIC:
    default:
    do_dynamic:
        old_cc_op = s->cc_op;
        /* calculate cc value */
        gen_op_calc_cc(s);

    do_dynamic_nocccalc:
        /* jump based on cc */
        account_noninline_branch(s, old_cc_op);

        switch (mask) {
        case 0x8 | 0x4 | 0x2 | 0x1:
            /* always true */
            break;
        case 0x8 | 0x4 | 0x2: /* cc != 3 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 3, skip);
            break;
        case 0x8 | 0x4 | 0x1: /* cc != 2 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 2, skip);
            break;
        case 0x8 | 0x2 | 0x1: /* cc != 1 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 1, skip);
            break;
        case 0x8 | 0x2: /* cc == 0 || cc == 2 */
            tmp = tcg_temp_new_i32();
            tcg_gen_andi_i32(tmp, cc_op, 1);
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            tcg_temp_free_i32(tmp);
            break;
        case 0x8 | 0x4: /* cc < 2 */
            tcg_gen_brcondi_i32(TCG_COND_GEU, cc_op, 2, skip);
            break;
        case 0x8: /* cc == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* cc != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 0, skip);
            break;
        case 0x4 | 0x1: /* cc == 1 || cc == 3 */
            tmp = tcg_temp_new_i32();
            tcg_gen_andi_i32(tmp, cc_op, 1);
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            tcg_temp_free_i32(tmp);
            break;
        case 0x4: /* cc == 1 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 1, skip);
            break;
        case 0x2 | 0x1: /* cc > 1 */
            tcg_gen_brcondi_i32(TCG_COND_LEU, cc_op, 1, skip);
            break;
        case 0x2: /* cc == 2 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 2, skip);
            break;
        case 0x1: /* cc == 3 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 3, skip);
            break;
        default: /* cc is masked by something else */
            tmp = tcg_const_i32(3);
            /* 3 - cc */
            tcg_gen_sub_i32(tmp, tmp, cc_op);
            tmp2 = tcg_const_i32(1);
            /* 1 << (3 - cc) */
            tcg_gen_shl_i32(tmp2, tmp2, tmp);
            r = tcg_const_i32(mask);
            /* mask & (1 << (3 - cc)) */
            tcg_gen_and_i32(r, r, tmp2);
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);

            tcg_gen_brcondi_i32(TCG_COND_EQ, r, 0, skip);
            tcg_temp_free_i32(r);
            break;
        }
        break;
    }
}
1191 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1192 uint64_t offset)
1194 int skip;
1196 if (mask == 0xf) {
1197 /* unconditional */
1198 tcg_gen_mov_i64(psw_addr, target);
1199 tcg_gen_exit_tb(0);
1200 } else if (mask == 0) {
1201 /* ignore cc and never match */
1202 gen_goto_tb(s, 0, offset + 2);
1203 } else {
1204 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1206 tcg_gen_mov_i64(new_addr, target);
1207 skip = gen_new_label();
1208 gen_jcc(s, mask, skip);
1209 tcg_gen_mov_i64(psw_addr, new_addr);
1210 tcg_temp_free_i64(new_addr);
1211 tcg_gen_exit_tb(0);
1212 gen_set_label(skip);
1213 tcg_temp_free_i64(new_addr);
1214 gen_goto_tb(s, 1, offset + 2);
1218 static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
1220 int skip;
1222 if (mask == 0xf) {
1223 /* unconditional */
1224 gen_goto_tb(s, 0, s->pc + offset);
1225 } else if (mask == 0) {
1226 /* ignore cc and never match */
1227 gen_goto_tb(s, 0, s->pc + 4);
1228 } else {
1229 skip = gen_new_label();
1230 gen_jcc(s, mask, skip);
1231 gen_goto_tb(s, 0, s->pc + offset);
1232 gen_set_label(skip);
1233 gen_goto_tb(s, 1, s->pc + 4);
1235 s->is_jmp = DISAS_TB_JUMP;
1238 static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1240 TCGv_i64 tmp, tmp2;
1241 int i;
1242 int l_memset = gen_new_label();
1243 int l_out = gen_new_label();
1244 TCGv_i64 dest = tcg_temp_local_new_i64();
1245 TCGv_i64 src = tcg_temp_local_new_i64();
1246 TCGv_i32 vl;
1248 /* Find out if we should use the inline version of mvc */
1249 switch (l) {
1250 case 0:
1251 case 1:
1252 case 2:
1253 case 3:
1254 case 4:
1255 case 5:
1256 case 6:
1257 case 7:
1258 case 11:
1259 case 15:
1260 /* use inline */
1261 break;
1262 default:
1263 /* Fall back to helper */
1264 vl = tcg_const_i32(l);
1265 potential_page_fault(s);
1266 gen_helper_mvc(cpu_env, vl, s1, s2);
1267 tcg_temp_free_i32(vl);
1268 return;
1271 tcg_gen_mov_i64(dest, s1);
1272 tcg_gen_mov_i64(src, s2);
1274 if (!(s->tb->flags & FLAG_MASK_64)) {
1275 /* XXX what if we overflow while moving? */
1276 tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
1277 tcg_gen_andi_i64(src, src, 0x7fffffffUL);
1280 tmp = tcg_temp_new_i64();
1281 tcg_gen_addi_i64(tmp, src, 1);
1282 tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
1283 tcg_temp_free_i64(tmp);
1285 switch (l) {
1286 case 0:
1287 tmp = tcg_temp_new_i64();
1289 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1290 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1292 tcg_temp_free_i64(tmp);
1293 break;
1294 case 1:
1295 tmp = tcg_temp_new_i64();
1297 tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
1298 tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));
1300 tcg_temp_free_i64(tmp);
1301 break;
1302 case 3:
1303 tmp = tcg_temp_new_i64();
1305 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1306 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1308 tcg_temp_free_i64(tmp);
1309 break;
1310 case 4:
1311 tmp = tcg_temp_new_i64();
1312 tmp2 = tcg_temp_new_i64();
1314 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1315 tcg_gen_addi_i64(src, src, 4);
1316 tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
1317 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1318 tcg_gen_addi_i64(dest, dest, 4);
1319 tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));
1321 tcg_temp_free_i64(tmp);
1322 tcg_temp_free_i64(tmp2);
1323 break;
1324 case 7:
1325 tmp = tcg_temp_new_i64();
1327 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1328 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1330 tcg_temp_free_i64(tmp);
1331 break;
1332 default:
1333 /* The inline version can become too big for too uneven numbers, only
1334 use it on known good lengths */
1335 tmp = tcg_temp_new_i64();
1336 tmp2 = tcg_const_i64(8);
1337 for (i = 0; (i + 7) <= l; i += 8) {
1338 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1339 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1341 tcg_gen_add_i64(src, src, tmp2);
1342 tcg_gen_add_i64(dest, dest, tmp2);
1345 tcg_temp_free_i64(tmp2);
1346 tmp2 = tcg_const_i64(1);
1348 for (; i <= l; i++) {
1349 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1350 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1352 tcg_gen_add_i64(src, src, tmp2);
1353 tcg_gen_add_i64(dest, dest, tmp2);
1356 tcg_temp_free_i64(tmp2);
1357 tcg_temp_free_i64(tmp);
1358 break;
1361 tcg_gen_br(l_out);
1363 gen_set_label(l_memset);
1364 /* memset case (dest == (src + 1)) */
1366 tmp = tcg_temp_new_i64();
1367 tmp2 = tcg_temp_new_i64();
1368 /* fill tmp with the byte */
1369 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1370 tcg_gen_shli_i64(tmp2, tmp, 8);
1371 tcg_gen_or_i64(tmp, tmp, tmp2);
1372 tcg_gen_shli_i64(tmp2, tmp, 16);
1373 tcg_gen_or_i64(tmp, tmp, tmp2);
1374 tcg_gen_shli_i64(tmp2, tmp, 32);
1375 tcg_gen_or_i64(tmp, tmp, tmp2);
1376 tcg_temp_free_i64(tmp2);
1378 tmp2 = tcg_const_i64(8);
1380 for (i = 0; (i + 7) <= l; i += 8) {
1381 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1382 tcg_gen_addi_i64(dest, dest, 8);
1385 tcg_temp_free_i64(tmp2);
1386 tmp2 = tcg_const_i64(1);
1388 for (; i <= l; i++) {
1389 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1390 tcg_gen_addi_i64(dest, dest, 1);
1393 tcg_temp_free_i64(tmp2);
1394 tcg_temp_free_i64(tmp);
1396 gen_set_label(l_out);
1398 tcg_temp_free(dest);
1399 tcg_temp_free(src);
1402 static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1404 TCGv_i64 tmp;
1405 TCGv_i64 tmp2;
1406 TCGv_i32 vl;
1408 /* check for simple 32bit or 64bit match */
1409 switch (l) {
1410 case 0:
1411 tmp = tcg_temp_new_i64();
1412 tmp2 = tcg_temp_new_i64();
1414 tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
1415 tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
1416 cmp_u64(s, tmp, tmp2);
1418 tcg_temp_free_i64(tmp);
1419 tcg_temp_free_i64(tmp2);
1420 return;
1421 case 1:
1422 tmp = tcg_temp_new_i64();
1423 tmp2 = tcg_temp_new_i64();
1425 tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
1426 tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
1427 cmp_u64(s, tmp, tmp2);
1429 tcg_temp_free_i64(tmp);
1430 tcg_temp_free_i64(tmp2);
1431 return;
1432 case 3:
1433 tmp = tcg_temp_new_i64();
1434 tmp2 = tcg_temp_new_i64();
1436 tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
1437 tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
1438 cmp_u64(s, tmp, tmp2);
1440 tcg_temp_free_i64(tmp);
1441 tcg_temp_free_i64(tmp2);
1442 return;
1443 case 7:
1444 tmp = tcg_temp_new_i64();
1445 tmp2 = tcg_temp_new_i64();
1447 tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
1448 tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
1449 cmp_u64(s, tmp, tmp2);
1451 tcg_temp_free_i64(tmp);
1452 tcg_temp_free_i64(tmp2);
1453 return;
1456 potential_page_fault(s);
1457 vl = tcg_const_i32(l);
1458 gen_helper_clc(cc_op, cpu_env, vl, s1, s2);
1459 tcg_temp_free_i32(vl);
1460 set_cc_static(s);
/* Disassemble one 0xe3-prefixed instruction (RXY/RXE formats).
 *
 * op is the low opcode byte; r1 the first register operand; x2/b2/d2
 * form the index/base/displacement of the memory operand.  Emits the
 * TCG ops for the insn; unrecognized ops raise an illegal-opcode
 * exception.
 */
static void disas_e3(DisasContext* s, int op, int r1, int x2, int b2, int d2)
{
    TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
              op, r1, x2, b2, d2);
    addr = get_address(s, x2, b2, d2);
    switch (op) {
    case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
    case 0x4: /* lg r1,d2(x2,b2) */
        tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
        if (op == 0x2) {
            /* LTG additionally sets the cc from the loaded value */
            set_cc_s64(s, regs[r1]);
        }
        break;
    case 0x12: /* LT R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        store_reg32(r1, tmp32_1);
        set_cc_s32(s, tmp32_1);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
    case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0xc) {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        }
        tcg_gen_mul_i64(regs[r1], regs[r1], tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
    case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x1d) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        /* dividend is in r1+1; quotient goes to r1+1, remainder to r1 */
        tmp4 = load_reg(r1 + 1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_div_i64(tmp3, tmp4, tmp2);
        store_reg(r1 + 1, tmp3);
        tcg_gen_rem_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x8: /* AG R1,D2(X2,B2) [RXY] */
    case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
    case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
    case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
        /* second operand width/extension depends on the variant */
        if (op == 0x1a) {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else if (op == 0x18) {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_add_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x8:
        case 0x18:
            /* signed add sets overflow-style cc */
            set_cc_add64(s, tmp4, tmp2, tmp3);
            break;
        case 0xa:
        case 0x1a:
            /* logical add sets carry-style cc */
            set_cc_addu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x9: /* SG R1,D2(X2,B2) [RXY] */
    case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
    case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
    case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x19) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else if (op == 0x1b) {
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_sub_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x9:
        case 0x19:
            set_cc_sub64(s, tmp4, tmp2, tmp3);
            break;
        case 0xb:
        case 0x1b:
            set_cc_subu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
        /* load 8 bytes byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        tcg_gen_bswap64_i64(tmp2, tmp2);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
    case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        if (op == 0x14) {
            /* LGF sign-extends, LLGF zero-extends */
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
        /* load 31-bit value: bit 32 is forced to zero */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
        store_reg16(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x20: /* CG R1,D2(X2,B2) [RXY] */
    case 0x21: /* CLG R1,D2(X2,B2) */
    case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
    case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        switch (op) {
        case 0x20:
        case 0x21:
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
            break;
        case 0x30:
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
            break;
        case 0x31:
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
            break;
        default:
            tcg_abort();
        }
        switch (op) {
        case 0x20:
        case 0x30:
            /* signed compare */
            cmp_s64(s, regs[r1], tmp2);
            break;
        case 0x21:
        case 0x31:
            /* unsigned (logical) compare */
            cmp_u64(s, regs[r1], tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x24: /* stg r1, d2(x2,b2) */
        tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
        break;
    case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x50: /* STY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x57: /* XY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
        store_reg32(r1, tmp32_2);
        set_cc_nz_u32(s, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x58: /* LY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
    case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        switch (op) {
        case 0x5a:
            tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        case 0x5b:
            tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_3);
        switch (op) {
        case 0x5a:
            set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x5b:
            set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
        /* load address: the computed address itself is the result */
        store_reg(r1, addr);
        break;
    case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
        store_reg8(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x76: /* LB R1,D2(X2,B2) [RXY] */
    case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
        switch (op) {
        case 0x76:
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg32_i64(r1, tmp2);
            break;
        case 0x77:
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg(r1, tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x80: /* NG R1,D2(X2,B2) [RXY] */
    case 0x81: /* OG R1,D2(X2,B2) [RXY] */
    case 0x82: /* XG R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
        switch (op) {
        case 0x80:
            tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x81:
            tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x82:
            tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
            break;
        default:
            tcg_abort();
        }
        set_cc_nz_u64(s, regs[r1]);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_mlg(cpu_env, tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_dlg(cpu_env, tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        /* extract the carry out of the current cc (bit 1) */
        tcg_gen_extu_i32_i64(tmp3, cc_op);
        tcg_gen_shri_i64(tmp3, tmp3, 1);
        tcg_gen_andi_i64(tmp3, tmp3, 1);
        tcg_gen_add_i64(tmp3, tmp2, tmp3);
        tcg_gen_add_i64(tmp3, regs[r1], tmp3);
        store_reg(r1, tmp3);
        /* NOTE(review): store_reg above appears to overwrite regs[r1]
           with the result before it is passed as the first operand of
           this cc computation — verify the intended operand.  */
        set_cc_addu64(s, regs[r1], tmp2, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, regs[r1], tmp2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x96: /* ML R1,D2(X2,B2) [RXY] */
        /* 32x32 -> 64 multiply; high word to r1, low word to r1+1 */
        tmp2 = tcg_temp_new_i64();
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_mul_i64(tmp2, tmp2, tmp3);
        store_reg32_i64((r1 + 1) & 15, tmp2);
        tcg_gen_shri_i64(tmp2, tmp2, 32);
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x97: /* DL R1,D2(X2,B2) [RXY] */
        /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
        /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
        tmp = load_reg(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        /* build the 64-bit dividend from the r1:r1+1 pair */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(tmp, tmp, tmp3);

        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        tcg_gen_div_i64(tmp, tmp, tmp2);
        store_reg32_i64((r1 + 1) & 15, tmp);
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
        set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal e3 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }
    tcg_temp_free_i64(addr);
}
#ifndef CONFIG_USER_ONLY
/* Disassemble one 0xe5-prefixed instruction (SSE format, system mode only).
 *
 * Both operands are base+displacement addresses extracted straight from
 * the raw 48-bit insn word.
 */
static void disas_e5(DisasContext* s, uint64_t insn)
{
    TCGv_i64 tmp, tmp2;
    int op = (insn >> 32) & 0xff;

    /* first operand: B1/D1, second operand: B2/D2 */
    tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
    tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);

    LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
    switch (op) {
    case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
        /* Test Protection */
        potential_page_fault(s);
        gen_helper_tprot(cc_op, tmp, tmp2);
        set_cc_static(s);
        break;
    default:
        LOG_DISAS("illegal e5 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }

    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(tmp2);
}
#endif
/* Disassemble one 0xeb-prefixed instruction (RSY/RSE/SIY formats).
 *
 * op is the low opcode byte; r1/r3 are the register operands (for SIY
 * insns they instead hold the two halves of the immediate I2); b2/d2
 * form the base/displacement of the storage operand.
 */
static void disas_eb(DisasContext *s, int op, int r1, int r3, int b2, int d2)
{
    TCGv_i64 tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2;
    int i, stm_len;
    int ilc = 3;   /* instruction length code: all 0xeb insns are 6 bytes */

    LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
              op, r1, r3, b2, d2);
    switch (op) {
    case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
    case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
    case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
    case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
    case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
        /* shift count is the low 6 bits of the computed address */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        switch (op) {
        case 0xc:
            tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xd:
            tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xa:
            tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xb:
            tmp2 = tcg_temp_new_i64();
            tmp3 = tcg_temp_new_i64();
            gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
            tcg_gen_shl_i64(tmp2, regs[r3], tmp);
            /* override sign bit with source sign */
            tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
            tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
            tcg_gen_or_i64(regs[r1], tmp2, tmp3);
            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp3);
            break;
        case 0x1c:
            tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
            break;
        default:
            tcg_abort();
            break;
        }
        if (op == 0xa) {
            /* SRAG also sets the cc from the result */
            set_cc_s64(s, regs[r1]);
        }
        tcg_temp_free_i64(tmp);
        break;
    case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        tmp32_1 = tcg_temp_new_i32();
        tmp32_2 = load_reg32(r3);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        switch (op) {
        case 0x1d:
            tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
            break;
        default:
            tcg_abort();
            break;
        }
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
    case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
        stm_len = 8;
        goto do_mh;
    case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
    case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
        stm_len = 4;
    do_mh:
        /* Apparently, unrolling lmg/stmg of any size gains performance -
           even for very long ones... */
        tmp = get_address(s, 0, b2, d2);
        tmp3 = tcg_const_i64(stm_len);
        /* NOTE(review): for LMH (op 0x96) this shift constant evaluates
           to 4 rather than 32, which looks suspicious for a high-word
           load — verify against the z/Architecture LMH definition.  */
        tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
        /* iterate r1..r3 inclusive, wrapping modulo 16 */
        for (i = r1;; i = (i + 1) % 16) {
            switch (op) {
            case 0x4:
                tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x96:
                tmp2 = tcg_temp_new_i64();
#if HOST_LONG_BITS == 32
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
#else
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_shl_i64(tmp2, tmp2, tmp4);
                tcg_gen_ext32u_i64(regs[i], regs[i]);
                tcg_gen_or_i64(regs[i], regs[i], tmp2);
#endif
                tcg_temp_free_i64(tmp2);
                break;
            case 0x24:
                tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x26:
                tmp2 = tcg_temp_new_i64();
                tcg_gen_shr_i64(tmp2, regs[i], tmp4);
                tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
                tcg_temp_free_i64(tmp2);
                break;
            default:
                tcg_abort();
            }
            if (i == r3) {
                break;
            }
            tcg_gen_add_i64(tmp, tmp, tmp3);
        }
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stcmh(cpu_env, tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#ifndef CONFIG_USER_ONLY
    case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
        /* Load Control */
        check_privileged(s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_lctlg(cpu_env, tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
        /* Store Control */
        check_privileged(s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stctg(cpu_env, tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#endif
    case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_csg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_cdsg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x51: /* TMY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
        /* yes, this is a 32 bit operation with 64 bit tcg registers, because
           that incurs less conversions */
        cmp_64(s, tmp, tmp2, CC_OP_TM_32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x52: /* MVIY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x55: /* CLIY D1(B1),I2 [SIY] */
        tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
        tmp = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX split CC calculation out */
        gen_helper_icmh(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal eb operation 0x%x\n", op);
        gen_illegal_opcode(s, ilc);
        break;
    }
}
2199 static void disas_ed(DisasContext *s, int op, int r1, int x2, int b2, int d2,
2200 int r1b)
2202 TCGv_i32 tmp_r1, tmp32;
2203 TCGv_i64 addr, tmp;
2204 addr = get_address(s, x2, b2, d2);
2205 tmp_r1 = tcg_const_i32(r1);
2206 switch (op) {
2207 case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
2208 potential_page_fault(s);
2209 gen_helper_ldeb(cpu_env, tmp_r1, addr);
2210 break;
2211 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2212 potential_page_fault(s);
2213 gen_helper_lxdb(cpu_env, tmp_r1, addr);
2214 break;
2215 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2216 tmp = tcg_temp_new_i64();
2217 tmp32 = load_freg32(r1);
2218 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2219 set_cc_cmp_f32_i64(s, tmp32, tmp);
2220 tcg_temp_free_i64(tmp);
2221 tcg_temp_free_i32(tmp32);
2222 break;
2223 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2224 tmp = tcg_temp_new_i64();
2225 tmp32 = tcg_temp_new_i32();
2226 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2227 tcg_gen_trunc_i64_i32(tmp32, tmp);
2228 gen_helper_aeb(cpu_env, tmp_r1, tmp32);
2229 tcg_temp_free_i64(tmp);
2230 tcg_temp_free_i32(tmp32);
2232 tmp32 = load_freg32(r1);
2233 gen_set_cc_nz_f32(s, tmp32);
2234 tcg_temp_free_i32(tmp32);
2235 break;
2236 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2237 tmp = tcg_temp_new_i64();
2238 tmp32 = tcg_temp_new_i32();
2239 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2240 tcg_gen_trunc_i64_i32(tmp32, tmp);
2241 gen_helper_seb(cpu_env, tmp_r1, tmp32);
2242 tcg_temp_free_i64(tmp);
2243 tcg_temp_free_i32(tmp32);
2245 tmp32 = load_freg32(r1);
2246 gen_set_cc_nz_f32(s, tmp32);
2247 tcg_temp_free_i32(tmp32);
2248 break;
2249 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2250 tmp = tcg_temp_new_i64();
2251 tmp32 = tcg_temp_new_i32();
2252 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2253 tcg_gen_trunc_i64_i32(tmp32, tmp);
2254 gen_helper_deb(cpu_env, tmp_r1, tmp32);
2255 tcg_temp_free_i64(tmp);
2256 tcg_temp_free_i32(tmp32);
2257 break;
2258 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2259 potential_page_fault(s);
2260 gen_helper_tceb(cc_op, cpu_env, tmp_r1, addr);
2261 set_cc_static(s);
2262 break;
2263 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2264 potential_page_fault(s);
2265 gen_helper_tcdb(cc_op, cpu_env, tmp_r1, addr);
2266 set_cc_static(s);
2267 break;
2268 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2269 potential_page_fault(s);
2270 gen_helper_tcxb(cc_op, cpu_env, tmp_r1, addr);
2271 set_cc_static(s);
2272 break;
2273 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2274 tmp = tcg_temp_new_i64();
2275 tmp32 = tcg_temp_new_i32();
2276 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2277 tcg_gen_trunc_i64_i32(tmp32, tmp);
2278 gen_helper_meeb(cpu_env, tmp_r1, tmp32);
2279 tcg_temp_free_i64(tmp);
2280 tcg_temp_free_i32(tmp32);
2281 break;
2282 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2283 potential_page_fault(s);
2284 gen_helper_cdb(cc_op, cpu_env, tmp_r1, addr);
2285 set_cc_static(s);
2286 break;
2287 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2288 potential_page_fault(s);
2289 gen_helper_adb(cc_op, cpu_env, tmp_r1, addr);
2290 set_cc_static(s);
2291 break;
2292 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2293 potential_page_fault(s);
2294 gen_helper_sdb(cc_op, cpu_env, tmp_r1, addr);
2295 set_cc_static(s);
2296 break;
2297 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2298 potential_page_fault(s);
2299 gen_helper_mdb(cpu_env, tmp_r1, addr);
2300 break;
2301 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2302 potential_page_fault(s);
2303 gen_helper_ddb(cpu_env, tmp_r1, addr);
2304 break;
2305 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2306 /* for RXF insns, r1 is R3 and r1b is R1 */
2307 tmp32 = tcg_const_i32(r1b);
2308 potential_page_fault(s);
2309 gen_helper_madb(cpu_env, tmp32, addr, tmp_r1);
2310 tcg_temp_free_i32(tmp32);
2311 break;
2312 default:
2313 LOG_DISAS("illegal ed operation 0x%x\n", op);
2314 gen_illegal_opcode(s, 3);
2315 return;
2317 tcg_temp_free_i32(tmp_r1);
2318 tcg_temp_free_i64(addr);
2321 static void disas_a5(DisasContext *s, int op, int r1, int i2)
2323 TCGv_i64 tmp, tmp2;
2324 TCGv_i32 tmp32;
2325 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2326 switch (op) {
2327 case 0x0: /* IIHH R1,I2 [RI] */
2328 tmp = tcg_const_i64(i2);
2329 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
2330 tcg_temp_free_i64(tmp);
2331 break;
2332 case 0x1: /* IIHL R1,I2 [RI] */
2333 tmp = tcg_const_i64(i2);
2334 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
2335 tcg_temp_free_i64(tmp);
2336 break;
2337 case 0x2: /* IILH R1,I2 [RI] */
2338 tmp = tcg_const_i64(i2);
2339 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
2340 tcg_temp_free_i64(tmp);
2341 break;
2342 case 0x3: /* IILL R1,I2 [RI] */
2343 tmp = tcg_const_i64(i2);
2344 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
2345 tcg_temp_free_i64(tmp);
2346 break;
2347 case 0x4: /* NIHH R1,I2 [RI] */
2348 case 0x8: /* OIHH R1,I2 [RI] */
2349 tmp = load_reg(r1);
2350 tmp32 = tcg_temp_new_i32();
2351 switch (op) {
2352 case 0x4:
2353 tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
2354 | 0x0000ffffffffffffULL);
2355 tcg_gen_and_i64(tmp, tmp, tmp2);
2356 break;
2357 case 0x8:
2358 tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
2359 tcg_gen_or_i64(tmp, tmp, tmp2);
2360 break;
2361 default:
2362 tcg_abort();
2364 store_reg(r1, tmp);
2365 tcg_gen_shri_i64(tmp2, tmp, 48);
2366 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2367 set_cc_nz_u32(s, tmp32);
2368 tcg_temp_free_i64(tmp2);
2369 tcg_temp_free_i32(tmp32);
2370 tcg_temp_free_i64(tmp);
2371 break;
2372 case 0x5: /* NIHL R1,I2 [RI] */
2373 case 0x9: /* OIHL R1,I2 [RI] */
2374 tmp = load_reg(r1);
2375 tmp32 = tcg_temp_new_i32();
2376 switch (op) {
2377 case 0x5:
2378 tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
2379 | 0xffff0000ffffffffULL);
2380 tcg_gen_and_i64(tmp, tmp, tmp2);
2381 break;
2382 case 0x9:
2383 tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
2384 tcg_gen_or_i64(tmp, tmp, tmp2);
2385 break;
2386 default:
2387 tcg_abort();
2389 store_reg(r1, tmp);
2390 tcg_gen_shri_i64(tmp2, tmp, 32);
2391 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2392 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2393 set_cc_nz_u32(s, tmp32);
2394 tcg_temp_free_i64(tmp2);
2395 tcg_temp_free_i32(tmp32);
2396 tcg_temp_free_i64(tmp);
2397 break;
2398 case 0x6: /* NILH R1,I2 [RI] */
2399 case 0xa: /* OILH R1,I2 [RI] */
2400 tmp = load_reg(r1);
2401 tmp32 = tcg_temp_new_i32();
2402 switch (op) {
2403 case 0x6:
2404 tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
2405 | 0xffffffff0000ffffULL);
2406 tcg_gen_and_i64(tmp, tmp, tmp2);
2407 break;
2408 case 0xa:
2409 tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
2410 tcg_gen_or_i64(tmp, tmp, tmp2);
2411 break;
2412 default:
2413 tcg_abort();
2415 store_reg(r1, tmp);
2416 tcg_gen_shri_i64(tmp, tmp, 16);
2417 tcg_gen_trunc_i64_i32(tmp32, tmp);
2418 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2419 set_cc_nz_u32(s, tmp32);
2420 tcg_temp_free_i64(tmp2);
2421 tcg_temp_free_i32(tmp32);
2422 tcg_temp_free_i64(tmp);
2423 break;
2424 case 0x7: /* NILL R1,I2 [RI] */
2425 case 0xb: /* OILL R1,I2 [RI] */
2426 tmp = load_reg(r1);
2427 tmp32 = tcg_temp_new_i32();
2428 switch (op) {
2429 case 0x7:
2430 tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
2431 tcg_gen_and_i64(tmp, tmp, tmp2);
2432 break;
2433 case 0xb:
2434 tmp2 = tcg_const_i64(i2);
2435 tcg_gen_or_i64(tmp, tmp, tmp2);
2436 break;
2437 default:
2438 tcg_abort();
2440 store_reg(r1, tmp);
2441 tcg_gen_trunc_i64_i32(tmp32, tmp);
2442 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2443 set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
2444 tcg_temp_free_i64(tmp2);
2445 tcg_temp_free_i32(tmp32);
2446 tcg_temp_free_i64(tmp);
2447 break;
2448 case 0xc: /* LLIHH R1,I2 [RI] */
2449 tmp = tcg_const_i64( ((uint64_t)i2) << 48 );
2450 store_reg(r1, tmp);
2451 tcg_temp_free_i64(tmp);
2452 break;
2453 case 0xd: /* LLIHL R1,I2 [RI] */
2454 tmp = tcg_const_i64( ((uint64_t)i2) << 32 );
2455 store_reg(r1, tmp);
2456 tcg_temp_free_i64(tmp);
2457 break;
2458 case 0xe: /* LLILH R1,I2 [RI] */
2459 tmp = tcg_const_i64( ((uint64_t)i2) << 16 );
2460 store_reg(r1, tmp);
2461 tcg_temp_free_i64(tmp);
2462 break;
2463 case 0xf: /* LLILL R1,I2 [RI] */
2464 tmp = tcg_const_i64(i2);
2465 store_reg(r1, tmp);
2466 tcg_temp_free_i64(tmp);
2467 break;
2468 default:
2469 LOG_DISAS("illegal a5 operation 0x%x\n", op);
2470 gen_illegal_opcode(s, 2);
2471 return;
2475 static void disas_a7(DisasContext *s, int op, int r1, int i2)
2477 TCGv_i64 tmp, tmp2;
2478 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2479 int l1;
2481 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2482 switch (op) {
2483 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2484 case 0x1: /* TMLL or TML R1,I2 [RI] */
2485 case 0x2: /* TMHH R1,I2 [RI] */
2486 case 0x3: /* TMHL R1,I2 [RI] */
2487 tmp = load_reg(r1);
2488 tmp2 = tcg_const_i64((uint16_t)i2);
2489 switch (op) {
2490 case 0x0:
2491 tcg_gen_shri_i64(tmp, tmp, 16);
2492 break;
2493 case 0x1:
2494 break;
2495 case 0x2:
2496 tcg_gen_shri_i64(tmp, tmp, 48);
2497 break;
2498 case 0x3:
2499 tcg_gen_shri_i64(tmp, tmp, 32);
2500 break;
2502 tcg_gen_andi_i64(tmp, tmp, 0xffff);
2503 cmp_64(s, tmp, tmp2, CC_OP_TM_64);
2504 tcg_temp_free_i64(tmp);
2505 tcg_temp_free_i64(tmp2);
2506 break;
2507 case 0x4: /* brc m1, i2 */
2508 gen_brc(r1, s, i2 * 2LL);
2509 return;
2510 case 0x5: /* BRAS R1,I2 [RI] */
2511 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
2512 store_reg(r1, tmp);
2513 tcg_temp_free_i64(tmp);
2514 gen_goto_tb(s, 0, s->pc + i2 * 2LL);
2515 s->is_jmp = DISAS_TB_JUMP;
2516 break;
2517 case 0x6: /* BRCT R1,I2 [RI] */
2518 tmp32_1 = load_reg32(r1);
2519 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
2520 store_reg32(r1, tmp32_1);
2521 gen_update_cc_op(s);
2522 l1 = gen_new_label();
2523 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
2524 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2525 gen_set_label(l1);
2526 gen_goto_tb(s, 1, s->pc + 4);
2527 s->is_jmp = DISAS_TB_JUMP;
2528 tcg_temp_free_i32(tmp32_1);
2529 break;
2530 case 0x7: /* BRCTG R1,I2 [RI] */
2531 tmp = load_reg(r1);
2532 tcg_gen_subi_i64(tmp, tmp, 1);
2533 store_reg(r1, tmp);
2534 gen_update_cc_op(s);
2535 l1 = gen_new_label();
2536 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
2537 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2538 gen_set_label(l1);
2539 gen_goto_tb(s, 1, s->pc + 4);
2540 s->is_jmp = DISAS_TB_JUMP;
2541 tcg_temp_free_i64(tmp);
2542 break;
2543 case 0x8: /* lhi r1, i2 */
2544 tmp32_1 = tcg_const_i32(i2);
2545 store_reg32(r1, tmp32_1);
2546 tcg_temp_free_i32(tmp32_1);
2547 break;
2548 case 0x9: /* lghi r1, i2 */
2549 tmp = tcg_const_i64(i2);
2550 store_reg(r1, tmp);
2551 tcg_temp_free_i64(tmp);
2552 break;
2553 case 0xa: /* AHI R1,I2 [RI] */
2554 tmp32_1 = load_reg32(r1);
2555 tmp32_2 = tcg_temp_new_i32();
2556 tmp32_3 = tcg_const_i32(i2);
2558 if (i2 < 0) {
2559 tcg_gen_subi_i32(tmp32_2, tmp32_1, -i2);
2560 } else {
2561 tcg_gen_add_i32(tmp32_2, tmp32_1, tmp32_3);
2564 store_reg32(r1, tmp32_2);
2565 set_cc_add32(s, tmp32_1, tmp32_3, tmp32_2);
2566 tcg_temp_free_i32(tmp32_1);
2567 tcg_temp_free_i32(tmp32_2);
2568 tcg_temp_free_i32(tmp32_3);
2569 break;
2570 case 0xb: /* aghi r1, i2 */
2571 tmp = load_reg(r1);
2572 tmp2 = tcg_const_i64(i2);
2574 if (i2 < 0) {
2575 tcg_gen_subi_i64(regs[r1], tmp, -i2);
2576 } else {
2577 tcg_gen_add_i64(regs[r1], tmp, tmp2);
2579 set_cc_add64(s, tmp, tmp2, regs[r1]);
2580 tcg_temp_free_i64(tmp);
2581 tcg_temp_free_i64(tmp2);
2582 break;
2583 case 0xc: /* MHI R1,I2 [RI] */
2584 tmp32_1 = load_reg32(r1);
2585 tcg_gen_muli_i32(tmp32_1, tmp32_1, i2);
2586 store_reg32(r1, tmp32_1);
2587 tcg_temp_free_i32(tmp32_1);
2588 break;
2589 case 0xd: /* MGHI R1,I2 [RI] */
2590 tmp = load_reg(r1);
2591 tcg_gen_muli_i64(tmp, tmp, i2);
2592 store_reg(r1, tmp);
2593 tcg_temp_free_i64(tmp);
2594 break;
2595 case 0xe: /* CHI R1,I2 [RI] */
2596 tmp32_1 = load_reg32(r1);
2597 cmp_s32c(s, tmp32_1, i2);
2598 tcg_temp_free_i32(tmp32_1);
2599 break;
2600 case 0xf: /* CGHI R1,I2 [RI] */
2601 tmp = load_reg(r1);
2602 cmp_s64c(s, tmp, i2);
2603 tcg_temp_free_i64(tmp);
2604 break;
2605 default:
2606 LOG_DISAS("illegal a7 operation 0x%x\n", op);
2607 gen_illegal_opcode(s, 2);
2608 return;
2612 static void disas_b2(DisasContext *s, int op, uint32_t insn)
2614 TCGv_i64 tmp, tmp2, tmp3;
2615 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2616 int r1, r2;
2617 int ilc = 2;
2618 #ifndef CONFIG_USER_ONLY
2619 int r3, d2, b2;
2620 #endif
2622 r1 = (insn >> 4) & 0xf;
2623 r2 = insn & 0xf;
2625 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);
2627 switch (op) {
2628 case 0x22: /* IPM R1 [RRE] */
2629 tmp32_1 = tcg_const_i32(r1);
2630 gen_op_calc_cc(s);
2631 gen_helper_ipm(cpu_env, cc_op, tmp32_1);
2632 tcg_temp_free_i32(tmp32_1);
2633 break;
2634 case 0x41: /* CKSM R1,R2 [RRE] */
2635 tmp32_1 = tcg_const_i32(r1);
2636 tmp32_2 = tcg_const_i32(r2);
2637 potential_page_fault(s);
2638 gen_helper_cksm(cpu_env, tmp32_1, tmp32_2);
2639 tcg_temp_free_i32(tmp32_1);
2640 tcg_temp_free_i32(tmp32_2);
2641 gen_op_movi_cc(s, 0);
2642 break;
2643 case 0x4e: /* SAR R1,R2 [RRE] */
2644 tmp32_1 = load_reg32(r2);
2645 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r1]));
2646 tcg_temp_free_i32(tmp32_1);
2647 break;
2648 case 0x4f: /* EAR R1,R2 [RRE] */
2649 tmp32_1 = tcg_temp_new_i32();
2650 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r2]));
2651 store_reg32(r1, tmp32_1);
2652 tcg_temp_free_i32(tmp32_1);
2653 break;
2654 case 0x52: /* MSR R1,R2 [RRE] */
2655 tmp32_1 = load_reg32(r1);
2656 tmp32_2 = load_reg32(r2);
2657 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
2658 store_reg32(r1, tmp32_1);
2659 tcg_temp_free_i32(tmp32_1);
2660 tcg_temp_free_i32(tmp32_2);
2661 break;
2662 case 0x54: /* MVPG R1,R2 [RRE] */
2663 tmp = load_reg(0);
2664 tmp2 = load_reg(r1);
2665 tmp3 = load_reg(r2);
2666 potential_page_fault(s);
2667 gen_helper_mvpg(cpu_env, tmp, tmp2, tmp3);
2668 tcg_temp_free_i64(tmp);
2669 tcg_temp_free_i64(tmp2);
2670 tcg_temp_free_i64(tmp3);
2671 /* XXX check CCO bit and set CC accordingly */
2672 gen_op_movi_cc(s, 0);
2673 break;
2674 case 0x55: /* MVST R1,R2 [RRE] */
2675 tmp32_1 = load_reg32(0);
2676 tmp32_2 = tcg_const_i32(r1);
2677 tmp32_3 = tcg_const_i32(r2);
2678 potential_page_fault(s);
2679 gen_helper_mvst(cpu_env, tmp32_1, tmp32_2, tmp32_3);
2680 tcg_temp_free_i32(tmp32_1);
2681 tcg_temp_free_i32(tmp32_2);
2682 tcg_temp_free_i32(tmp32_3);
2683 gen_op_movi_cc(s, 1);
2684 break;
2685 case 0x5d: /* CLST R1,R2 [RRE] */
2686 tmp32_1 = load_reg32(0);
2687 tmp32_2 = tcg_const_i32(r1);
2688 tmp32_3 = tcg_const_i32(r2);
2689 potential_page_fault(s);
2690 gen_helper_clst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
2691 set_cc_static(s);
2692 tcg_temp_free_i32(tmp32_1);
2693 tcg_temp_free_i32(tmp32_2);
2694 tcg_temp_free_i32(tmp32_3);
2695 break;
2696 case 0x5e: /* SRST R1,R2 [RRE] */
2697 tmp32_1 = load_reg32(0);
2698 tmp32_2 = tcg_const_i32(r1);
2699 tmp32_3 = tcg_const_i32(r2);
2700 potential_page_fault(s);
2701 gen_helper_srst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
2702 set_cc_static(s);
2703 tcg_temp_free_i32(tmp32_1);
2704 tcg_temp_free_i32(tmp32_2);
2705 tcg_temp_free_i32(tmp32_3);
2706 break;
2708 #ifndef CONFIG_USER_ONLY
2709 case 0x02: /* STIDP D2(B2) [S] */
2710 /* Store CPU ID */
2711 check_privileged(s, ilc);
2712 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2713 tmp = get_address(s, 0, b2, d2);
2714 potential_page_fault(s);
2715 gen_helper_stidp(cpu_env, tmp);
2716 tcg_temp_free_i64(tmp);
2717 break;
2718 case 0x04: /* SCK D2(B2) [S] */
2719 /* Set Clock */
2720 check_privileged(s, ilc);
2721 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2722 tmp = get_address(s, 0, b2, d2);
2723 potential_page_fault(s);
2724 gen_helper_sck(cc_op, tmp);
2725 set_cc_static(s);
2726 tcg_temp_free_i64(tmp);
2727 break;
2728 case 0x05: /* STCK D2(B2) [S] */
2729 /* Store Clock */
2730 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2731 tmp = get_address(s, 0, b2, d2);
2732 potential_page_fault(s);
2733 gen_helper_stck(cc_op, cpu_env, tmp);
2734 set_cc_static(s);
2735 tcg_temp_free_i64(tmp);
2736 break;
2737 case 0x06: /* SCKC D2(B2) [S] */
2738 /* Set Clock Comparator */
2739 check_privileged(s, ilc);
2740 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2741 tmp = get_address(s, 0, b2, d2);
2742 potential_page_fault(s);
2743 gen_helper_sckc(cpu_env, tmp);
2744 tcg_temp_free_i64(tmp);
2745 break;
2746 case 0x07: /* STCKC D2(B2) [S] */
2747 /* Store Clock Comparator */
2748 check_privileged(s, ilc);
2749 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2750 tmp = get_address(s, 0, b2, d2);
2751 potential_page_fault(s);
2752 gen_helper_stckc(cpu_env, tmp);
2753 tcg_temp_free_i64(tmp);
2754 break;
2755 case 0x08: /* SPT D2(B2) [S] */
2756 /* Set CPU Timer */
2757 check_privileged(s, ilc);
2758 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2759 tmp = get_address(s, 0, b2, d2);
2760 potential_page_fault(s);
2761 gen_helper_spt(cpu_env, tmp);
2762 tcg_temp_free_i64(tmp);
2763 break;
2764 case 0x09: /* STPT D2(B2) [S] */
2765 /* Store CPU Timer */
2766 check_privileged(s, ilc);
2767 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2768 tmp = get_address(s, 0, b2, d2);
2769 potential_page_fault(s);
2770 gen_helper_stpt(cpu_env, tmp);
2771 tcg_temp_free_i64(tmp);
2772 break;
2773 case 0x0a: /* SPKA D2(B2) [S] */
2774 /* Set PSW Key from Address */
2775 check_privileged(s, ilc);
2776 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2777 tmp = get_address(s, 0, b2, d2);
2778 tmp2 = tcg_temp_new_i64();
2779 tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
2780 tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
2781 tcg_gen_or_i64(psw_mask, tmp2, tmp);
2782 tcg_temp_free_i64(tmp2);
2783 tcg_temp_free_i64(tmp);
2784 break;
2785 case 0x0d: /* PTLB [S] */
2786 /* Purge TLB */
2787 check_privileged(s, ilc);
2788 gen_helper_ptlb(cpu_env);
2789 break;
2790 case 0x10: /* SPX D2(B2) [S] */
2791 /* Set Prefix Register */
2792 check_privileged(s, ilc);
2793 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2794 tmp = get_address(s, 0, b2, d2);
2795 potential_page_fault(s);
2796 gen_helper_spx(cpu_env, tmp);
2797 tcg_temp_free_i64(tmp);
2798 break;
2799 case 0x11: /* STPX D2(B2) [S] */
2800 /* Store Prefix */
2801 check_privileged(s, ilc);
2802 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2803 tmp = get_address(s, 0, b2, d2);
2804 tmp2 = tcg_temp_new_i64();
2805 tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUS390XState, psa));
2806 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2807 tcg_temp_free_i64(tmp);
2808 tcg_temp_free_i64(tmp2);
2809 break;
2810 case 0x12: /* STAP D2(B2) [S] */
2811 /* Store CPU Address */
2812 check_privileged(s, ilc);
2813 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2814 tmp = get_address(s, 0, b2, d2);
2815 tmp2 = tcg_temp_new_i64();
2816 tmp32_1 = tcg_temp_new_i32();
2817 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, cpu_num));
2818 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
2819 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2820 tcg_temp_free_i64(tmp);
2821 tcg_temp_free_i64(tmp2);
2822 tcg_temp_free_i32(tmp32_1);
2823 break;
2824 case 0x21: /* IPTE R1,R2 [RRE] */
2825 /* Invalidate PTE */
2826 check_privileged(s, ilc);
2827 r1 = (insn >> 4) & 0xf;
2828 r2 = insn & 0xf;
2829 tmp = load_reg(r1);
2830 tmp2 = load_reg(r2);
2831 gen_helper_ipte(cpu_env, tmp, tmp2);
2832 tcg_temp_free_i64(tmp);
2833 tcg_temp_free_i64(tmp2);
2834 break;
2835 case 0x29: /* ISKE R1,R2 [RRE] */
2836 /* Insert Storage Key Extended */
2837 check_privileged(s, ilc);
2838 r1 = (insn >> 4) & 0xf;
2839 r2 = insn & 0xf;
2840 tmp = load_reg(r2);
2841 tmp2 = tcg_temp_new_i64();
2842 gen_helper_iske(tmp2, cpu_env, tmp);
2843 store_reg(r1, tmp2);
2844 tcg_temp_free_i64(tmp);
2845 tcg_temp_free_i64(tmp2);
2846 break;
2847 case 0x2a: /* RRBE R1,R2 [RRE] */
2848 /* Set Storage Key Extended */
2849 check_privileged(s, ilc);
2850 r1 = (insn >> 4) & 0xf;
2851 r2 = insn & 0xf;
2852 tmp32_1 = load_reg32(r1);
2853 tmp = load_reg(r2);
2854 gen_helper_rrbe(cc_op, cpu_env, tmp32_1, tmp);
2855 set_cc_static(s);
2856 tcg_temp_free_i32(tmp32_1);
2857 tcg_temp_free_i64(tmp);
2858 break;
2859 case 0x2b: /* SSKE R1,R2 [RRE] */
2860 /* Set Storage Key Extended */
2861 check_privileged(s, ilc);
2862 r1 = (insn >> 4) & 0xf;
2863 r2 = insn & 0xf;
2864 tmp32_1 = load_reg32(r1);
2865 tmp = load_reg(r2);
2866 gen_helper_sske(cpu_env, tmp32_1, tmp);
2867 tcg_temp_free_i32(tmp32_1);
2868 tcg_temp_free_i64(tmp);
2869 break;
2870 case 0x34: /* STCH ? */
2871 /* Store Subchannel */
2872 check_privileged(s, ilc);
2873 gen_op_movi_cc(s, 3);
2874 break;
2875 case 0x46: /* STURA R1,R2 [RRE] */
2876 /* Store Using Real Address */
2877 check_privileged(s, ilc);
2878 r1 = (insn >> 4) & 0xf;
2879 r2 = insn & 0xf;
2880 tmp32_1 = load_reg32(r1);
2881 tmp = load_reg(r2);
2882 potential_page_fault(s);
2883 gen_helper_stura(cpu_env, tmp, tmp32_1);
2884 tcg_temp_free_i32(tmp32_1);
2885 tcg_temp_free_i64(tmp);
2886 break;
2887 case 0x50: /* CSP R1,R2 [RRE] */
2888 /* Compare And Swap And Purge */
2889 check_privileged(s, ilc);
2890 r1 = (insn >> 4) & 0xf;
2891 r2 = insn & 0xf;
2892 tmp32_1 = tcg_const_i32(r1);
2893 tmp32_2 = tcg_const_i32(r2);
2894 gen_helper_csp(cc_op, cpu_env, tmp32_1, tmp32_2);
2895 set_cc_static(s);
2896 tcg_temp_free_i32(tmp32_1);
2897 tcg_temp_free_i32(tmp32_2);
2898 break;
2899 case 0x5f: /* CHSC ? */
2900 /* Channel Subsystem Call */
2901 check_privileged(s, ilc);
2902 gen_op_movi_cc(s, 3);
2903 break;
2904 case 0x78: /* STCKE D2(B2) [S] */
2905 /* Store Clock Extended */
2906 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2907 tmp = get_address(s, 0, b2, d2);
2908 potential_page_fault(s);
2909 gen_helper_stcke(cc_op, cpu_env, tmp);
2910 set_cc_static(s);
2911 tcg_temp_free_i64(tmp);
2912 break;
2913 case 0x79: /* SACF D2(B2) [S] */
2914 /* Store Clock Extended */
2915 check_privileged(s, ilc);
2916 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2917 tmp = get_address(s, 0, b2, d2);
2918 potential_page_fault(s);
2919 gen_helper_sacf(cpu_env, tmp);
2920 tcg_temp_free_i64(tmp);
2921 /* addressing mode has changed, so end the block */
2922 s->pc += ilc * 2;
2923 update_psw_addr(s);
2924 s->is_jmp = DISAS_EXCP;
2925 break;
2926 case 0x7d: /* STSI D2,(B2) [S] */
2927 check_privileged(s, ilc);
2928 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2929 tmp = get_address(s, 0, b2, d2);
2930 tmp32_1 = load_reg32(0);
2931 tmp32_2 = load_reg32(1);
2932 potential_page_fault(s);
2933 gen_helper_stsi(cc_op, cpu_env, tmp, tmp32_1, tmp32_2);
2934 set_cc_static(s);
2935 tcg_temp_free_i64(tmp);
2936 tcg_temp_free_i32(tmp32_1);
2937 tcg_temp_free_i32(tmp32_2);
2938 break;
2939 case 0x9d: /* LFPC D2(B2) [S] */
2940 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2941 tmp = get_address(s, 0, b2, d2);
2942 tmp2 = tcg_temp_new_i64();
2943 tmp32_1 = tcg_temp_new_i32();
2944 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
2945 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
2946 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
2947 tcg_temp_free_i64(tmp);
2948 tcg_temp_free_i64(tmp2);
2949 tcg_temp_free_i32(tmp32_1);
2950 break;
2951 case 0xb1: /* STFL D2(B2) [S] */
2952 /* Store Facility List (CPU features) at 200 */
2953 check_privileged(s, ilc);
2954 tmp2 = tcg_const_i64(0xc0000000);
2955 tmp = tcg_const_i64(200);
2956 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2957 tcg_temp_free_i64(tmp2);
2958 tcg_temp_free_i64(tmp);
2959 break;
2960 case 0xb2: /* LPSWE D2(B2) [S] */
2961 /* Load PSW Extended */
2962 check_privileged(s, ilc);
2963 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2964 tmp = get_address(s, 0, b2, d2);
2965 tmp2 = tcg_temp_new_i64();
2966 tmp3 = tcg_temp_new_i64();
2967 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
2968 tcg_gen_addi_i64(tmp, tmp, 8);
2969 tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
2970 gen_helper_load_psw(cpu_env, tmp2, tmp3);
2971 /* we need to keep cc_op intact */
2972 s->is_jmp = DISAS_JUMP;
2973 tcg_temp_free_i64(tmp);
2974 tcg_temp_free_i64(tmp2);
2975 tcg_temp_free_i64(tmp3);
2976 break;
2977 case 0x20: /* SERVC R1,R2 [RRE] */
2978 /* SCLP Service call (PV hypercall) */
2979 check_privileged(s, ilc);
2980 potential_page_fault(s);
2981 tmp32_1 = load_reg32(r2);
2982 tmp = load_reg(r1);
2983 gen_helper_servc(cc_op, cpu_env, tmp32_1, tmp);
2984 set_cc_static(s);
2985 tcg_temp_free_i32(tmp32_1);
2986 tcg_temp_free_i64(tmp);
2987 break;
2988 #endif
2989 default:
2990 LOG_DISAS("illegal b2 operation 0x%x\n", op);
2991 gen_illegal_opcode(s, ilc);
2992 break;
2996 static void disas_b3(DisasContext *s, int op, int m3, int r1, int r2)
2998 TCGv_i64 tmp;
2999 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3000 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
3001 #define FP_HELPER(i) \
3002 tmp32_1 = tcg_const_i32(r1); \
3003 tmp32_2 = tcg_const_i32(r2); \
3004 gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
3005 tcg_temp_free_i32(tmp32_1); \
3006 tcg_temp_free_i32(tmp32_2);
3008 #define FP_HELPER_CC(i) \
3009 tmp32_1 = tcg_const_i32(r1); \
3010 tmp32_2 = tcg_const_i32(r2); \
3011 gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
3012 set_cc_static(s); \
3013 tcg_temp_free_i32(tmp32_1); \
3014 tcg_temp_free_i32(tmp32_2);
3016 switch (op) {
3017 case 0x0: /* LPEBR R1,R2 [RRE] */
3018 FP_HELPER_CC(lpebr);
3019 break;
3020 case 0x2: /* LTEBR R1,R2 [RRE] */
3021 FP_HELPER_CC(ltebr);
3022 break;
3023 case 0x3: /* LCEBR R1,R2 [RRE] */
3024 FP_HELPER_CC(lcebr);
3025 break;
3026 case 0x4: /* LDEBR R1,R2 [RRE] */
3027 FP_HELPER(ldebr);
3028 break;
3029 case 0x5: /* LXDBR R1,R2 [RRE] */
3030 FP_HELPER(lxdbr);
3031 break;
3032 case 0x9: /* CEBR R1,R2 [RRE] */
3033 FP_HELPER_CC(cebr);
3034 break;
3035 case 0xa: /* AEBR R1,R2 [RRE] */
3036 FP_HELPER_CC(aebr);
3037 break;
3038 case 0xb: /* SEBR R1,R2 [RRE] */
3039 FP_HELPER_CC(sebr);
3040 break;
3041 case 0xd: /* DEBR R1,R2 [RRE] */
3042 FP_HELPER(debr);
3043 break;
3044 case 0x10: /* LPDBR R1,R2 [RRE] */
3045 FP_HELPER_CC(lpdbr);
3046 break;
3047 case 0x12: /* LTDBR R1,R2 [RRE] */
3048 FP_HELPER_CC(ltdbr);
3049 break;
3050 case 0x13: /* LCDBR R1,R2 [RRE] */
3051 FP_HELPER_CC(lcdbr);
3052 break;
3053 case 0x15: /* SQBDR R1,R2 [RRE] */
3054 FP_HELPER(sqdbr);
3055 break;
3056 case 0x17: /* MEEBR R1,R2 [RRE] */
3057 FP_HELPER(meebr);
3058 break;
3059 case 0x19: /* CDBR R1,R2 [RRE] */
3060 FP_HELPER_CC(cdbr);
3061 break;
3062 case 0x1a: /* ADBR R1,R2 [RRE] */
3063 FP_HELPER_CC(adbr);
3064 break;
3065 case 0x1b: /* SDBR R1,R2 [RRE] */
3066 FP_HELPER_CC(sdbr);
3067 break;
3068 case 0x1c: /* MDBR R1,R2 [RRE] */
3069 FP_HELPER(mdbr);
3070 break;
3071 case 0x1d: /* DDBR R1,R2 [RRE] */
3072 FP_HELPER(ddbr);
3073 break;
3074 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3075 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3076 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3077 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3078 tmp32_1 = tcg_const_i32(m3);
3079 tmp32_2 = tcg_const_i32(r2);
3080 tmp32_3 = tcg_const_i32(r1);
3081 switch (op) {
3082 case 0xe:
3083 gen_helper_maebr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
3084 break;
3085 case 0x1e:
3086 gen_helper_madbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
3087 break;
3088 case 0x1f:
3089 gen_helper_msdbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
3090 break;
3091 default:
3092 tcg_abort();
3094 tcg_temp_free_i32(tmp32_1);
3095 tcg_temp_free_i32(tmp32_2);
3096 tcg_temp_free_i32(tmp32_3);
3097 break;
3098 case 0x40: /* LPXBR R1,R2 [RRE] */
3099 FP_HELPER_CC(lpxbr);
3100 break;
3101 case 0x42: /* LTXBR R1,R2 [RRE] */
3102 FP_HELPER_CC(ltxbr);
3103 break;
3104 case 0x43: /* LCXBR R1,R2 [RRE] */
3105 FP_HELPER_CC(lcxbr);
3106 break;
3107 case 0x44: /* LEDBR R1,R2 [RRE] */
3108 FP_HELPER(ledbr);
3109 break;
3110 case 0x45: /* LDXBR R1,R2 [RRE] */
3111 FP_HELPER(ldxbr);
3112 break;
3113 case 0x46: /* LEXBR R1,R2 [RRE] */
3114 FP_HELPER(lexbr);
3115 break;
3116 case 0x49: /* CXBR R1,R2 [RRE] */
3117 FP_HELPER_CC(cxbr);
3118 break;
3119 case 0x4a: /* AXBR R1,R2 [RRE] */
3120 FP_HELPER_CC(axbr);
3121 break;
3122 case 0x4b: /* SXBR R1,R2 [RRE] */
3123 FP_HELPER_CC(sxbr);
3124 break;
3125 case 0x4c: /* MXBR R1,R2 [RRE] */
3126 FP_HELPER(mxbr);
3127 break;
3128 case 0x4d: /* DXBR R1,R2 [RRE] */
3129 FP_HELPER(dxbr);
3130 break;
3131 case 0x65: /* LXR R1,R2 [RRE] */
3132 tmp = load_freg(r2);
3133 store_freg(r1, tmp);
3134 tcg_temp_free_i64(tmp);
3135 tmp = load_freg(r2 + 2);
3136 store_freg(r1 + 2, tmp);
3137 tcg_temp_free_i64(tmp);
3138 break;
3139 case 0x74: /* LZER R1 [RRE] */
3140 tmp32_1 = tcg_const_i32(r1);
3141 gen_helper_lzer(cpu_env, tmp32_1);
3142 tcg_temp_free_i32(tmp32_1);
3143 break;
3144 case 0x75: /* LZDR R1 [RRE] */
3145 tmp32_1 = tcg_const_i32(r1);
3146 gen_helper_lzdr(cpu_env, tmp32_1);
3147 tcg_temp_free_i32(tmp32_1);
3148 break;
3149 case 0x76: /* LZXR R1 [RRE] */
3150 tmp32_1 = tcg_const_i32(r1);
3151 gen_helper_lzxr(cpu_env, tmp32_1);
3152 tcg_temp_free_i32(tmp32_1);
3153 break;
3154 case 0x84: /* SFPC R1 [RRE] */
3155 tmp32_1 = load_reg32(r1);
3156 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
3157 tcg_temp_free_i32(tmp32_1);
3158 break;
3159 case 0x8c: /* EFPC R1 [RRE] */
3160 tmp32_1 = tcg_temp_new_i32();
3161 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
3162 store_reg32(r1, tmp32_1);
3163 tcg_temp_free_i32(tmp32_1);
3164 break;
3165 case 0x94: /* CEFBR R1,R2 [RRE] */
3166 case 0x95: /* CDFBR R1,R2 [RRE] */
3167 case 0x96: /* CXFBR R1,R2 [RRE] */
3168 tmp32_1 = tcg_const_i32(r1);
3169 tmp32_2 = load_reg32(r2);
3170 switch (op) {
3171 case 0x94:
3172 gen_helper_cefbr(cpu_env, tmp32_1, tmp32_2);
3173 break;
3174 case 0x95:
3175 gen_helper_cdfbr(cpu_env, tmp32_1, tmp32_2);
3176 break;
3177 case 0x96:
3178 gen_helper_cxfbr(cpu_env, tmp32_1, tmp32_2);
3179 break;
3180 default:
3181 tcg_abort();
3183 tcg_temp_free_i32(tmp32_1);
3184 tcg_temp_free_i32(tmp32_2);
3185 break;
3186 case 0x98: /* CFEBR R1,R2 [RRE] */
3187 case 0x99: /* CFDBR R1,R2 [RRE] */
3188 case 0x9a: /* CFXBR R1,R2 [RRE] */
3189 tmp32_1 = tcg_const_i32(r1);
3190 tmp32_2 = tcg_const_i32(r2);
3191 tmp32_3 = tcg_const_i32(m3);
3192 switch (op) {
3193 case 0x98:
3194 gen_helper_cfebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3195 break;
3196 case 0x99:
3197 gen_helper_cfdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3198 break;
3199 case 0x9a:
3200 gen_helper_cfxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3201 break;
3202 default:
3203 tcg_abort();
3205 set_cc_static(s);
3206 tcg_temp_free_i32(tmp32_1);
3207 tcg_temp_free_i32(tmp32_2);
3208 tcg_temp_free_i32(tmp32_3);
3209 break;
3210 case 0xa4: /* CEGBR R1,R2 [RRE] */
3211 case 0xa5: /* CDGBR R1,R2 [RRE] */
3212 tmp32_1 = tcg_const_i32(r1);
3213 tmp = load_reg(r2);
3214 switch (op) {
3215 case 0xa4:
3216 gen_helper_cegbr(cpu_env, tmp32_1, tmp);
3217 break;
3218 case 0xa5:
3219 gen_helper_cdgbr(cpu_env, tmp32_1, tmp);
3220 break;
3221 default:
3222 tcg_abort();
3224 tcg_temp_free_i32(tmp32_1);
3225 tcg_temp_free_i64(tmp);
3226 break;
3227 case 0xa6: /* CXGBR R1,R2 [RRE] */
3228 tmp32_1 = tcg_const_i32(r1);
3229 tmp = load_reg(r2);
3230 gen_helper_cxgbr(cpu_env, tmp32_1, tmp);
3231 tcg_temp_free_i32(tmp32_1);
3232 tcg_temp_free_i64(tmp);
3233 break;
3234 case 0xa8: /* CGEBR R1,R2 [RRE] */
3235 tmp32_1 = tcg_const_i32(r1);
3236 tmp32_2 = tcg_const_i32(r2);
3237 tmp32_3 = tcg_const_i32(m3);
3238 gen_helper_cgebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3239 set_cc_static(s);
3240 tcg_temp_free_i32(tmp32_1);
3241 tcg_temp_free_i32(tmp32_2);
3242 tcg_temp_free_i32(tmp32_3);
3243 break;
3244 case 0xa9: /* CGDBR R1,R2 [RRE] */
3245 tmp32_1 = tcg_const_i32(r1);
3246 tmp32_2 = tcg_const_i32(r2);
3247 tmp32_3 = tcg_const_i32(m3);
3248 gen_helper_cgdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3249 set_cc_static(s);
3250 tcg_temp_free_i32(tmp32_1);
3251 tcg_temp_free_i32(tmp32_2);
3252 tcg_temp_free_i32(tmp32_3);
3253 break;
3254 case 0xaa: /* CGXBR R1,R2 [RRE] */
3255 tmp32_1 = tcg_const_i32(r1);
3256 tmp32_2 = tcg_const_i32(r2);
3257 tmp32_3 = tcg_const_i32(m3);
3258 gen_helper_cgxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3259 set_cc_static(s);
3260 tcg_temp_free_i32(tmp32_1);
3261 tcg_temp_free_i32(tmp32_2);
3262 tcg_temp_free_i32(tmp32_3);
3263 break;
3264 default:
3265 LOG_DISAS("illegal b3 operation 0x%x\n", op);
3266 gen_illegal_opcode(s, 2);
3267 break;
3270 #undef FP_HELPER_CC
3271 #undef FP_HELPER
3274 static void disas_b9(DisasContext *s, int op, int r1, int r2)
3276 TCGv_i64 tmp, tmp2, tmp3;
3277 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3279 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
3280 switch (op) {
3281 case 0x0: /* LPGR R1,R2 [RRE] */
3282 case 0x1: /* LNGR R1,R2 [RRE] */
3283 case 0x2: /* LTGR R1,R2 [RRE] */
3284 case 0x3: /* LCGR R1,R2 [RRE] */
3285 case 0x10: /* LPGFR R1,R2 [RRE] */
3286 case 0x11: /* LNFGR R1,R2 [RRE] */
3287 case 0x12: /* LTGFR R1,R2 [RRE] */
3288 case 0x13: /* LCGFR R1,R2 [RRE] */
3289 if (op & 0x10) {
3290 tmp = load_reg32_i64(r2);
3291 } else {
3292 tmp = load_reg(r2);
3294 switch (op & 0xf) {
3295 case 0x0: /* LP?GR */
3296 set_cc_abs64(s, tmp);
3297 gen_helper_abs_i64(tmp, tmp);
3298 store_reg(r1, tmp);
3299 break;
3300 case 0x1: /* LN?GR */
3301 set_cc_nabs64(s, tmp);
3302 gen_helper_nabs_i64(tmp, tmp);
3303 store_reg(r1, tmp);
3304 break;
3305 case 0x2: /* LT?GR */
3306 if (r1 != r2) {
3307 store_reg(r1, tmp);
3309 set_cc_s64(s, tmp);
3310 break;
3311 case 0x3: /* LC?GR */
3312 tcg_gen_neg_i64(regs[r1], tmp);
3313 set_cc_comp64(s, regs[r1]);
3314 break;
3316 tcg_temp_free_i64(tmp);
3317 break;
3318 case 0x4: /* LGR R1,R2 [RRE] */
3319 store_reg(r1, regs[r2]);
3320 break;
3321 case 0x6: /* LGBR R1,R2 [RRE] */
3322 tmp2 = load_reg(r2);
3323 tcg_gen_ext8s_i64(tmp2, tmp2);
3324 store_reg(r1, tmp2);
3325 tcg_temp_free_i64(tmp2);
3326 break;
3327 case 0x8: /* AGR R1,R2 [RRE] */
3328 case 0xa: /* ALGR R1,R2 [RRE] */
3329 tmp = load_reg(r1);
3330 tmp2 = load_reg(r2);
3331 tmp3 = tcg_temp_new_i64();
3332 tcg_gen_add_i64(tmp3, tmp, tmp2);
3333 store_reg(r1, tmp3);
3334 switch (op) {
3335 case 0x8:
3336 set_cc_add64(s, tmp, tmp2, tmp3);
3337 break;
3338 case 0xa:
3339 set_cc_addu64(s, tmp, tmp2, tmp3);
3340 break;
3341 default:
3342 tcg_abort();
3344 tcg_temp_free_i64(tmp);
3345 tcg_temp_free_i64(tmp2);
3346 tcg_temp_free_i64(tmp3);
3347 break;
3348 case 0x9: /* SGR R1,R2 [RRE] */
3349 case 0xb: /* SLGR R1,R2 [RRE] */
3350 case 0x1b: /* SLGFR R1,R2 [RRE] */
3351 case 0x19: /* SGFR R1,R2 [RRE] */
3352 tmp = load_reg(r1);
3353 switch (op) {
3354 case 0x1b:
3355 tmp32_1 = load_reg32(r2);
3356 tmp2 = tcg_temp_new_i64();
3357 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3358 tcg_temp_free_i32(tmp32_1);
3359 break;
3360 case 0x19:
3361 tmp32_1 = load_reg32(r2);
3362 tmp2 = tcg_temp_new_i64();
3363 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3364 tcg_temp_free_i32(tmp32_1);
3365 break;
3366 default:
3367 tmp2 = load_reg(r2);
3368 break;
3370 tmp3 = tcg_temp_new_i64();
3371 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3372 store_reg(r1, tmp3);
3373 switch (op) {
3374 case 0x9:
3375 case 0x19:
3376 set_cc_sub64(s, tmp, tmp2, tmp3);
3377 break;
3378 case 0xb:
3379 case 0x1b:
3380 set_cc_subu64(s, tmp, tmp2, tmp3);
3381 break;
3382 default:
3383 tcg_abort();
3385 tcg_temp_free_i64(tmp);
3386 tcg_temp_free_i64(tmp2);
3387 tcg_temp_free_i64(tmp3);
3388 break;
3389 case 0xc: /* MSGR R1,R2 [RRE] */
3390 case 0x1c: /* MSGFR R1,R2 [RRE] */
3391 tmp = load_reg(r1);
3392 tmp2 = load_reg(r2);
3393 if (op == 0x1c) {
3394 tcg_gen_ext32s_i64(tmp2, tmp2);
3396 tcg_gen_mul_i64(tmp, tmp, tmp2);
3397 store_reg(r1, tmp);
3398 tcg_temp_free_i64(tmp);
3399 tcg_temp_free_i64(tmp2);
3400 break;
3401 case 0xd: /* DSGR R1,R2 [RRE] */
3402 case 0x1d: /* DSGFR R1,R2 [RRE] */
3403 tmp = load_reg(r1 + 1);
3404 if (op == 0xd) {
3405 tmp2 = load_reg(r2);
3406 } else {
3407 tmp32_1 = load_reg32(r2);
3408 tmp2 = tcg_temp_new_i64();
3409 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3410 tcg_temp_free_i32(tmp32_1);
3412 tmp3 = tcg_temp_new_i64();
3413 tcg_gen_div_i64(tmp3, tmp, tmp2);
3414 store_reg(r1 + 1, tmp3);
3415 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3416 store_reg(r1, tmp3);
3417 tcg_temp_free_i64(tmp);
3418 tcg_temp_free_i64(tmp2);
3419 tcg_temp_free_i64(tmp3);
3420 break;
3421 case 0x14: /* LGFR R1,R2 [RRE] */
3422 tmp32_1 = load_reg32(r2);
3423 tmp = tcg_temp_new_i64();
3424 tcg_gen_ext_i32_i64(tmp, tmp32_1);
3425 store_reg(r1, tmp);
3426 tcg_temp_free_i32(tmp32_1);
3427 tcg_temp_free_i64(tmp);
3428 break;
3429 case 0x16: /* LLGFR R1,R2 [RRE] */
3430 tmp32_1 = load_reg32(r2);
3431 tmp = tcg_temp_new_i64();
3432 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3433 store_reg(r1, tmp);
3434 tcg_temp_free_i32(tmp32_1);
3435 tcg_temp_free_i64(tmp);
3436 break;
3437 case 0x17: /* LLGTR R1,R2 [RRE] */
3438 tmp32_1 = load_reg32(r2);
3439 tmp = tcg_temp_new_i64();
3440 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
3441 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3442 store_reg(r1, tmp);
3443 tcg_temp_free_i32(tmp32_1);
3444 tcg_temp_free_i64(tmp);
3445 break;
3446 case 0x18: /* AGFR R1,R2 [RRE] */
3447 case 0x1a: /* ALGFR R1,R2 [RRE] */
3448 tmp32_1 = load_reg32(r2);
3449 tmp2 = tcg_temp_new_i64();
3450 if (op == 0x18) {
3451 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3452 } else {
3453 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3455 tcg_temp_free_i32(tmp32_1);
3456 tmp = load_reg(r1);
3457 tmp3 = tcg_temp_new_i64();
3458 tcg_gen_add_i64(tmp3, tmp, tmp2);
3459 store_reg(r1, tmp3);
3460 if (op == 0x18) {
3461 set_cc_add64(s, tmp, tmp2, tmp3);
3462 } else {
3463 set_cc_addu64(s, tmp, tmp2, tmp3);
3465 tcg_temp_free_i64(tmp);
3466 tcg_temp_free_i64(tmp2);
3467 tcg_temp_free_i64(tmp3);
3468 break;
3469 case 0x0f: /* LRVGR R1,R2 [RRE] */
3470 tcg_gen_bswap64_i64(regs[r1], regs[r2]);
3471 break;
3472 case 0x1f: /* LRVR R1,R2 [RRE] */
3473 tmp32_1 = load_reg32(r2);
3474 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
3475 store_reg32(r1, tmp32_1);
3476 tcg_temp_free_i32(tmp32_1);
3477 break;
3478 case 0x20: /* CGR R1,R2 [RRE] */
3479 case 0x30: /* CGFR R1,R2 [RRE] */
3480 tmp2 = load_reg(r2);
3481 if (op == 0x30) {
3482 tcg_gen_ext32s_i64(tmp2, tmp2);
3484 tmp = load_reg(r1);
3485 cmp_s64(s, tmp, tmp2);
3486 tcg_temp_free_i64(tmp);
3487 tcg_temp_free_i64(tmp2);
3488 break;
3489 case 0x21: /* CLGR R1,R2 [RRE] */
3490 case 0x31: /* CLGFR R1,R2 [RRE] */
3491 tmp2 = load_reg(r2);
3492 if (op == 0x31) {
3493 tcg_gen_ext32u_i64(tmp2, tmp2);
3495 tmp = load_reg(r1);
3496 cmp_u64(s, tmp, tmp2);
3497 tcg_temp_free_i64(tmp);
3498 tcg_temp_free_i64(tmp2);
3499 break;
3500 case 0x26: /* LBR R1,R2 [RRE] */
3501 tmp32_1 = load_reg32(r2);
3502 tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
3503 store_reg32(r1, tmp32_1);
3504 tcg_temp_free_i32(tmp32_1);
3505 break;
3506 case 0x27: /* LHR R1,R2 [RRE] */
3507 tmp32_1 = load_reg32(r2);
3508 tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
3509 store_reg32(r1, tmp32_1);
3510 tcg_temp_free_i32(tmp32_1);
3511 break;
3512 case 0x80: /* NGR R1,R2 [RRE] */
3513 case 0x81: /* OGR R1,R2 [RRE] */
3514 case 0x82: /* XGR R1,R2 [RRE] */
3515 tmp = load_reg(r1);
3516 tmp2 = load_reg(r2);
3517 switch (op) {
3518 case 0x80:
3519 tcg_gen_and_i64(tmp, tmp, tmp2);
3520 break;
3521 case 0x81:
3522 tcg_gen_or_i64(tmp, tmp, tmp2);
3523 break;
3524 case 0x82:
3525 tcg_gen_xor_i64(tmp, tmp, tmp2);
3526 break;
3527 default:
3528 tcg_abort();
3530 store_reg(r1, tmp);
3531 set_cc_nz_u64(s, tmp);
3532 tcg_temp_free_i64(tmp);
3533 tcg_temp_free_i64(tmp2);
3534 break;
3535 case 0x83: /* FLOGR R1,R2 [RRE] */
3536 tmp = load_reg(r2);
3537 tmp32_1 = tcg_const_i32(r1);
3538 gen_helper_flogr(cc_op, cpu_env, tmp32_1, tmp);
3539 set_cc_static(s);
3540 tcg_temp_free_i64(tmp);
3541 tcg_temp_free_i32(tmp32_1);
3542 break;
3543 case 0x84: /* LLGCR R1,R2 [RRE] */
3544 tmp = load_reg(r2);
3545 tcg_gen_andi_i64(tmp, tmp, 0xff);
3546 store_reg(r1, tmp);
3547 tcg_temp_free_i64(tmp);
3548 break;
3549 case 0x85: /* LLGHR R1,R2 [RRE] */
3550 tmp = load_reg(r2);
3551 tcg_gen_andi_i64(tmp, tmp, 0xffff);
3552 store_reg(r1, tmp);
3553 tcg_temp_free_i64(tmp);
3554 break;
3555 case 0x87: /* DLGR R1,R2 [RRE] */
3556 tmp32_1 = tcg_const_i32(r1);
3557 tmp = load_reg(r2);
3558 gen_helper_dlg(cpu_env, tmp32_1, tmp);
3559 tcg_temp_free_i64(tmp);
3560 tcg_temp_free_i32(tmp32_1);
3561 break;
3562 case 0x88: /* ALCGR R1,R2 [RRE] */
3563 tmp = load_reg(r1);
3564 tmp2 = load_reg(r2);
3565 tmp3 = tcg_temp_new_i64();
3566 gen_op_calc_cc(s);
3567 tcg_gen_extu_i32_i64(tmp3, cc_op);
3568 tcg_gen_shri_i64(tmp3, tmp3, 1);
3569 tcg_gen_andi_i64(tmp3, tmp3, 1);
3570 tcg_gen_add_i64(tmp3, tmp2, tmp3);
3571 tcg_gen_add_i64(tmp3, tmp, tmp3);
3572 store_reg(r1, tmp3);
3573 set_cc_addu64(s, tmp, tmp2, tmp3);
3574 tcg_temp_free_i64(tmp);
3575 tcg_temp_free_i64(tmp2);
3576 tcg_temp_free_i64(tmp3);
3577 break;
3578 case 0x89: /* SLBGR R1,R2 [RRE] */
3579 tmp = load_reg(r1);
3580 tmp2 = load_reg(r2);
3581 tmp32_1 = tcg_const_i32(r1);
3582 gen_op_calc_cc(s);
3583 gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, tmp, tmp2);
3584 set_cc_static(s);
3585 tcg_temp_free_i64(tmp);
3586 tcg_temp_free_i64(tmp2);
3587 tcg_temp_free_i32(tmp32_1);
3588 break;
3589 case 0x94: /* LLCR R1,R2 [RRE] */
3590 tmp32_1 = load_reg32(r2);
3591 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
3592 store_reg32(r1, tmp32_1);
3593 tcg_temp_free_i32(tmp32_1);
3594 break;
3595 case 0x95: /* LLHR R1,R2 [RRE] */
3596 tmp32_1 = load_reg32(r2);
3597 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
3598 store_reg32(r1, tmp32_1);
3599 tcg_temp_free_i32(tmp32_1);
3600 break;
3601 case 0x96: /* MLR R1,R2 [RRE] */
3602 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3603 tmp2 = load_reg(r2);
3604 tmp3 = load_reg((r1 + 1) & 15);
3605 tcg_gen_ext32u_i64(tmp2, tmp2);
3606 tcg_gen_ext32u_i64(tmp3, tmp3);
3607 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3608 store_reg32_i64((r1 + 1) & 15, tmp2);
3609 tcg_gen_shri_i64(tmp2, tmp2, 32);
3610 store_reg32_i64(r1, tmp2);
3611 tcg_temp_free_i64(tmp2);
3612 tcg_temp_free_i64(tmp3);
3613 break;
3614 case 0x97: /* DLR R1,R2 [RRE] */
3615 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3616 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3617 tmp = load_reg(r1);
3618 tmp2 = load_reg(r2);
3619 tmp3 = load_reg((r1 + 1) & 15);
3620 tcg_gen_ext32u_i64(tmp2, tmp2);
3621 tcg_gen_ext32u_i64(tmp3, tmp3);
3622 tcg_gen_shli_i64(tmp, tmp, 32);
3623 tcg_gen_or_i64(tmp, tmp, tmp3);
3625 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3626 tcg_gen_div_i64(tmp, tmp, tmp2);
3627 store_reg32_i64((r1 + 1) & 15, tmp);
3628 store_reg32_i64(r1, tmp3);
3629 tcg_temp_free_i64(tmp);
3630 tcg_temp_free_i64(tmp2);
3631 tcg_temp_free_i64(tmp3);
3632 break;
3633 case 0x98: /* ALCR R1,R2 [RRE] */
3634 tmp32_1 = load_reg32(r1);
3635 tmp32_2 = load_reg32(r2);
3636 tmp32_3 = tcg_temp_new_i32();
3637 /* XXX possible optimization point */
3638 gen_op_calc_cc(s);
3639 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
3640 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3641 store_reg32(r1, tmp32_3);
3642 tcg_temp_free_i32(tmp32_1);
3643 tcg_temp_free_i32(tmp32_2);
3644 tcg_temp_free_i32(tmp32_3);
3645 break;
3646 case 0x99: /* SLBR R1,R2 [RRE] */
3647 tmp32_1 = load_reg32(r2);
3648 tmp32_2 = tcg_const_i32(r1);
3649 gen_op_calc_cc(s);
3650 gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_2, tmp32_1);
3651 set_cc_static(s);
3652 tcg_temp_free_i32(tmp32_1);
3653 tcg_temp_free_i32(tmp32_2);
3654 break;
3655 default:
3656 LOG_DISAS("illegal b9 operation 0x%x\n", op);
3657 gen_illegal_opcode(s, 2);
3658 break;
3662 static void disas_c0(DisasContext *s, int op, int r1, int i2)
3664 TCGv_i64 tmp;
3665 TCGv_i32 tmp32_1, tmp32_2;
3666 uint64_t target = s->pc + i2 * 2LL;
3667 int l1;
3669 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);
3671 switch (op) {
3672 case 0: /* larl r1, i2 */
3673 tmp = tcg_const_i64(target);
3674 store_reg(r1, tmp);
3675 tcg_temp_free_i64(tmp);
3676 break;
3677 case 0x1: /* LGFI R1,I2 [RIL] */
3678 tmp = tcg_const_i64((int64_t)i2);
3679 store_reg(r1, tmp);
3680 tcg_temp_free_i64(tmp);
3681 break;
3682 case 0x4: /* BRCL M1,I2 [RIL] */
3683 /* m1 & (1 << (3 - cc)) */
3684 tmp32_1 = tcg_const_i32(3);
3685 tmp32_2 = tcg_const_i32(1);
3686 gen_op_calc_cc(s);
3687 tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
3688 tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
3689 tcg_temp_free_i32(tmp32_1);
3690 tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
3691 tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
3692 l1 = gen_new_label();
3693 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
3694 gen_goto_tb(s, 0, target);
3695 gen_set_label(l1);
3696 gen_goto_tb(s, 1, s->pc + 6);
3697 s->is_jmp = DISAS_TB_JUMP;
3698 tcg_temp_free_i32(tmp32_1);
3699 tcg_temp_free_i32(tmp32_2);
3700 break;
3701 case 0x5: /* brasl r1, i2 */
3702 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
3703 store_reg(r1, tmp);
3704 tcg_temp_free_i64(tmp);
3705 gen_goto_tb(s, 0, target);
3706 s->is_jmp = DISAS_TB_JUMP;
3707 break;
3708 case 0x7: /* XILF R1,I2 [RIL] */
3709 case 0xb: /* NILF R1,I2 [RIL] */
3710 case 0xd: /* OILF R1,I2 [RIL] */
3711 tmp32_1 = load_reg32(r1);
3712 switch (op) {
3713 case 0x7:
3714 tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3715 break;
3716 case 0xb:
3717 tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3718 break;
3719 case 0xd:
3720 tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3721 break;
3722 default:
3723 tcg_abort();
3725 store_reg32(r1, tmp32_1);
3726 set_cc_nz_u32(s, tmp32_1);
3727 tcg_temp_free_i32(tmp32_1);
3728 break;
3729 case 0x9: /* IILF R1,I2 [RIL] */
3730 tmp32_1 = tcg_const_i32((uint32_t)i2);
3731 store_reg32(r1, tmp32_1);
3732 tcg_temp_free_i32(tmp32_1);
3733 break;
3734 case 0xa: /* NIHF R1,I2 [RIL] */
3735 tmp = load_reg(r1);
3736 tmp32_1 = tcg_temp_new_i32();
3737 tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
3738 | 0xffffffffULL);
3739 store_reg(r1, tmp);
3740 tcg_gen_shri_i64(tmp, tmp, 32);
3741 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3742 set_cc_nz_u32(s, tmp32_1);
3743 tcg_temp_free_i64(tmp);
3744 tcg_temp_free_i32(tmp32_1);
3745 break;
3746 case 0xe: /* LLIHF R1,I2 [RIL] */
3747 tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
3748 store_reg(r1, tmp);
3749 tcg_temp_free_i64(tmp);
3750 break;
3751 case 0xf: /* LLILF R1,I2 [RIL] */
3752 tmp = tcg_const_i64((uint32_t)i2);
3753 store_reg(r1, tmp);
3754 tcg_temp_free_i64(tmp);
3755 break;
3756 default:
3757 LOG_DISAS("illegal c0 operation 0x%x\n", op);
3758 gen_illegal_opcode(s, 3);
3759 break;
3763 static void disas_c2(DisasContext *s, int op, int r1, int i2)
3765 TCGv_i64 tmp, tmp2, tmp3;
3766 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3768 switch (op) {
3769 case 0x4: /* SLGFI R1,I2 [RIL] */
3770 case 0xa: /* ALGFI R1,I2 [RIL] */
3771 tmp = load_reg(r1);
3772 tmp2 = tcg_const_i64((uint64_t)(uint32_t)i2);
3773 tmp3 = tcg_temp_new_i64();
3774 switch (op) {
3775 case 0x4:
3776 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3777 set_cc_subu64(s, tmp, tmp2, tmp3);
3778 break;
3779 case 0xa:
3780 tcg_gen_add_i64(tmp3, tmp, tmp2);
3781 set_cc_addu64(s, tmp, tmp2, tmp3);
3782 break;
3783 default:
3784 tcg_abort();
3786 store_reg(r1, tmp3);
3787 tcg_temp_free_i64(tmp);
3788 tcg_temp_free_i64(tmp2);
3789 tcg_temp_free_i64(tmp3);
3790 break;
3791 case 0x5: /* SLFI R1,I2 [RIL] */
3792 case 0xb: /* ALFI R1,I2 [RIL] */
3793 tmp32_1 = load_reg32(r1);
3794 tmp32_2 = tcg_const_i32(i2);
3795 tmp32_3 = tcg_temp_new_i32();
3796 switch (op) {
3797 case 0x5:
3798 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
3799 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
3800 break;
3801 case 0xb:
3802 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
3803 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3804 break;
3805 default:
3806 tcg_abort();
3808 store_reg32(r1, tmp32_3);
3809 tcg_temp_free_i32(tmp32_1);
3810 tcg_temp_free_i32(tmp32_2);
3811 tcg_temp_free_i32(tmp32_3);
3812 break;
3813 case 0xc: /* CGFI R1,I2 [RIL] */
3814 tmp = load_reg(r1);
3815 cmp_s64c(s, tmp, (int64_t)i2);
3816 tcg_temp_free_i64(tmp);
3817 break;
3818 case 0xe: /* CLGFI R1,I2 [RIL] */
3819 tmp = load_reg(r1);
3820 cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
3821 tcg_temp_free_i64(tmp);
3822 break;
3823 case 0xd: /* CFI R1,I2 [RIL] */
3824 tmp32_1 = load_reg32(r1);
3825 cmp_s32c(s, tmp32_1, i2);
3826 tcg_temp_free_i32(tmp32_1);
3827 break;
3828 case 0xf: /* CLFI R1,I2 [RIL] */
3829 tmp32_1 = load_reg32(r1);
3830 cmp_u32c(s, tmp32_1, i2);
3831 tcg_temp_free_i32(tmp32_1);
3832 break;
3833 default:
3834 LOG_DISAS("illegal c2 operation 0x%x\n", op);
3835 gen_illegal_opcode(s, 3);
3836 break;
3840 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3842 switch (opc & 0xf) {
3843 case 0x4:
3844 tcg_gen_and_i32(tmp, tmp, tmp2);
3845 break;
3846 case 0x6:
3847 tcg_gen_or_i32(tmp, tmp, tmp2);
3848 break;
3849 case 0x7:
3850 tcg_gen_xor_i32(tmp, tmp, tmp2);
3851 break;
3852 default:
3853 tcg_abort();
3857 static void disas_s390_insn(DisasContext *s)
3859 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3860 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3861 unsigned char opc;
3862 uint64_t insn;
3863 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3864 TCGv_i32 vl;
3865 int ilc;
3866 int l1;
3868 opc = cpu_ldub_code(cpu_single_env, s->pc);
3869 LOG_DISAS("opc 0x%x\n", opc);
3871 ilc = get_ilc(opc);
3873 switch (opc) {
3874 #ifndef CONFIG_USER_ONLY
3875 case 0x01: /* SAM */
3876 insn = ld_code2(s->pc);
3877 /* set addressing mode, but we only do 64bit anyways */
3878 break;
3879 #endif
3880 case 0x6: /* BCTR R1,R2 [RR] */
3881 insn = ld_code2(s->pc);
3882 decode_rr(s, insn, &r1, &r2);
3883 tmp32_1 = load_reg32(r1);
3884 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3885 store_reg32(r1, tmp32_1);
3887 if (r2) {
3888 gen_update_cc_op(s);
3889 l1 = gen_new_label();
3890 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3892 /* not taking the branch, jump to after the instruction */
3893 gen_goto_tb(s, 0, s->pc + 2);
3894 gen_set_label(l1);
3896 /* take the branch, move R2 into psw.addr */
3897 tmp32_1 = load_reg32(r2);
3898 tmp = tcg_temp_new_i64();
3899 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3900 tcg_gen_mov_i64(psw_addr, tmp);
3901 s->is_jmp = DISAS_JUMP;
3902 tcg_temp_free_i32(tmp32_1);
3903 tcg_temp_free_i64(tmp);
3905 break;
3906 case 0x7: /* BCR M1,R2 [RR] */
3907 insn = ld_code2(s->pc);
3908 decode_rr(s, insn, &r1, &r2);
3909 if (r2) {
3910 tmp = load_reg(r2);
3911 gen_bcr(s, r1, tmp, s->pc);
3912 tcg_temp_free_i64(tmp);
3913 s->is_jmp = DISAS_TB_JUMP;
3914 } else {
3915 /* XXX: "serialization and checkpoint-synchronization function"? */
3917 break;
3918 case 0xa: /* SVC I [RR] */
3919 insn = ld_code2(s->pc);
3920 debug_insn(insn);
3921 i = insn & 0xff;
3922 update_psw_addr(s);
3923 gen_op_calc_cc(s);
3924 tmp32_1 = tcg_const_i32(i);
3925 tmp32_2 = tcg_const_i32(ilc * 2);
3926 tmp32_3 = tcg_const_i32(EXCP_SVC);
3927 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, int_svc_code));
3928 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUS390XState, int_svc_ilc));
3929 gen_helper_exception(cpu_env, tmp32_3);
3930 s->is_jmp = DISAS_EXCP;
3931 tcg_temp_free_i32(tmp32_1);
3932 tcg_temp_free_i32(tmp32_2);
3933 tcg_temp_free_i32(tmp32_3);
3934 break;
3935 case 0xd: /* BASR R1,R2 [RR] */
3936 insn = ld_code2(s->pc);
3937 decode_rr(s, insn, &r1, &r2);
3938 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3939 store_reg(r1, tmp);
3940 if (r2) {
3941 tmp2 = load_reg(r2);
3942 tcg_gen_mov_i64(psw_addr, tmp2);
3943 tcg_temp_free_i64(tmp2);
3944 s->is_jmp = DISAS_JUMP;
3946 tcg_temp_free_i64(tmp);
3947 break;
3948 case 0xe: /* MVCL R1,R2 [RR] */
3949 insn = ld_code2(s->pc);
3950 decode_rr(s, insn, &r1, &r2);
3951 tmp32_1 = tcg_const_i32(r1);
3952 tmp32_2 = tcg_const_i32(r2);
3953 potential_page_fault(s);
3954 gen_helper_mvcl(cc_op, cpu_env, tmp32_1, tmp32_2);
3955 set_cc_static(s);
3956 tcg_temp_free_i32(tmp32_1);
3957 tcg_temp_free_i32(tmp32_2);
3958 break;
3959 case 0x10: /* LPR R1,R2 [RR] */
3960 insn = ld_code2(s->pc);
3961 decode_rr(s, insn, &r1, &r2);
3962 tmp32_1 = load_reg32(r2);
3963 set_cc_abs32(s, tmp32_1);
3964 gen_helper_abs_i32(tmp32_1, tmp32_1);
3965 store_reg32(r1, tmp32_1);
3966 tcg_temp_free_i32(tmp32_1);
3967 break;
3968 case 0x11: /* LNR R1,R2 [RR] */
3969 insn = ld_code2(s->pc);
3970 decode_rr(s, insn, &r1, &r2);
3971 tmp32_1 = load_reg32(r2);
3972 set_cc_nabs32(s, tmp32_1);
3973 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3974 store_reg32(r1, tmp32_1);
3975 tcg_temp_free_i32(tmp32_1);
3976 break;
3977 case 0x12: /* LTR R1,R2 [RR] */
3978 insn = ld_code2(s->pc);
3979 decode_rr(s, insn, &r1, &r2);
3980 tmp32_1 = load_reg32(r2);
3981 if (r1 != r2) {
3982 store_reg32(r1, tmp32_1);
3984 set_cc_s32(s, tmp32_1);
3985 tcg_temp_free_i32(tmp32_1);
3986 break;
3987 case 0x13: /* LCR R1,R2 [RR] */
3988 insn = ld_code2(s->pc);
3989 decode_rr(s, insn, &r1, &r2);
3990 tmp32_1 = load_reg32(r2);
3991 tcg_gen_neg_i32(tmp32_1, tmp32_1);
3992 store_reg32(r1, tmp32_1);
3993 set_cc_comp32(s, tmp32_1);
3994 tcg_temp_free_i32(tmp32_1);
3995 break;
3996 case 0x14: /* NR R1,R2 [RR] */
3997 case 0x16: /* OR R1,R2 [RR] */
3998 case 0x17: /* XR R1,R2 [RR] */
3999 insn = ld_code2(s->pc);
4000 decode_rr(s, insn, &r1, &r2);
4001 tmp32_2 = load_reg32(r2);
4002 tmp32_1 = load_reg32(r1);
4003 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4004 store_reg32(r1, tmp32_1);
4005 set_cc_nz_u32(s, tmp32_1);
4006 tcg_temp_free_i32(tmp32_1);
4007 tcg_temp_free_i32(tmp32_2);
4008 break;
4009 case 0x18: /* LR R1,R2 [RR] */
4010 insn = ld_code2(s->pc);
4011 decode_rr(s, insn, &r1, &r2);
4012 tmp32_1 = load_reg32(r2);
4013 store_reg32(r1, tmp32_1);
4014 tcg_temp_free_i32(tmp32_1);
4015 break;
4016 case 0x15: /* CLR R1,R2 [RR] */
4017 case 0x19: /* CR R1,R2 [RR] */
4018 insn = ld_code2(s->pc);
4019 decode_rr(s, insn, &r1, &r2);
4020 tmp32_1 = load_reg32(r1);
4021 tmp32_2 = load_reg32(r2);
4022 if (opc == 0x15) {
4023 cmp_u32(s, tmp32_1, tmp32_2);
4024 } else {
4025 cmp_s32(s, tmp32_1, tmp32_2);
4027 tcg_temp_free_i32(tmp32_1);
4028 tcg_temp_free_i32(tmp32_2);
4029 break;
4030 case 0x1a: /* AR R1,R2 [RR] */
4031 case 0x1e: /* ALR R1,R2 [RR] */
4032 insn = ld_code2(s->pc);
4033 decode_rr(s, insn, &r1, &r2);
4034 tmp32_1 = load_reg32(r1);
4035 tmp32_2 = load_reg32(r2);
4036 tmp32_3 = tcg_temp_new_i32();
4037 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4038 store_reg32(r1, tmp32_3);
4039 if (opc == 0x1a) {
4040 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4041 } else {
4042 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4044 tcg_temp_free_i32(tmp32_1);
4045 tcg_temp_free_i32(tmp32_2);
4046 tcg_temp_free_i32(tmp32_3);
4047 break;
4048 case 0x1b: /* SR R1,R2 [RR] */
4049 case 0x1f: /* SLR R1,R2 [RR] */
4050 insn = ld_code2(s->pc);
4051 decode_rr(s, insn, &r1, &r2);
4052 tmp32_1 = load_reg32(r1);
4053 tmp32_2 = load_reg32(r2);
4054 tmp32_3 = tcg_temp_new_i32();
4055 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4056 store_reg32(r1, tmp32_3);
4057 if (opc == 0x1b) {
4058 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4059 } else {
4060 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4062 tcg_temp_free_i32(tmp32_1);
4063 tcg_temp_free_i32(tmp32_2);
4064 tcg_temp_free_i32(tmp32_3);
4065 break;
4066 case 0x1c: /* MR R1,R2 [RR] */
4067 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4068 insn = ld_code2(s->pc);
4069 decode_rr(s, insn, &r1, &r2);
4070 tmp2 = load_reg(r2);
4071 tmp3 = load_reg((r1 + 1) & 15);
4072 tcg_gen_ext32s_i64(tmp2, tmp2);
4073 tcg_gen_ext32s_i64(tmp3, tmp3);
4074 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4075 store_reg32_i64((r1 + 1) & 15, tmp2);
4076 tcg_gen_shri_i64(tmp2, tmp2, 32);
4077 store_reg32_i64(r1, tmp2);
4078 tcg_temp_free_i64(tmp2);
4079 tcg_temp_free_i64(tmp3);
4080 break;
4081 case 0x1d: /* DR R1,R2 [RR] */
4082 insn = ld_code2(s->pc);
4083 decode_rr(s, insn, &r1, &r2);
4084 tmp32_1 = load_reg32(r1);
4085 tmp32_2 = load_reg32(r1 + 1);
4086 tmp32_3 = load_reg32(r2);
4088 tmp = tcg_temp_new_i64(); /* dividend */
4089 tmp2 = tcg_temp_new_i64(); /* divisor */
4090 tmp3 = tcg_temp_new_i64();
4092 /* dividend is r(r1 << 32) | r(r1 + 1) */
4093 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4094 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4095 tcg_gen_shli_i64(tmp, tmp, 32);
4096 tcg_gen_or_i64(tmp, tmp, tmp2);
4098 /* divisor is r(r2) */
4099 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
4101 tcg_gen_div_i64(tmp3, tmp, tmp2);
4102 tcg_gen_rem_i64(tmp, tmp, tmp2);
4104 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4105 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4107 store_reg32(r1, tmp32_1); /* remainder */
4108 store_reg32(r1 + 1, tmp32_2); /* quotient */
4109 tcg_temp_free_i32(tmp32_1);
4110 tcg_temp_free_i32(tmp32_2);
4111 tcg_temp_free_i32(tmp32_3);
4112 tcg_temp_free_i64(tmp);
4113 tcg_temp_free_i64(tmp2);
4114 tcg_temp_free_i64(tmp3);
4115 break;
4116 case 0x28: /* LDR R1,R2 [RR] */
4117 insn = ld_code2(s->pc);
4118 decode_rr(s, insn, &r1, &r2);
4119 tmp = load_freg(r2);
4120 store_freg(r1, tmp);
4121 tcg_temp_free_i64(tmp);
4122 break;
4123 case 0x38: /* LER R1,R2 [RR] */
4124 insn = ld_code2(s->pc);
4125 decode_rr(s, insn, &r1, &r2);
4126 tmp32_1 = load_freg32(r2);
4127 store_freg32(r1, tmp32_1);
4128 tcg_temp_free_i32(tmp32_1);
4129 break;
4130 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4131 insn = ld_code4(s->pc);
4132 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4133 tmp2 = load_reg(r1);
4134 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
4135 tcg_temp_free_i64(tmp);
4136 tcg_temp_free_i64(tmp2);
4137 break;
4138 case 0x41: /* la */
4139 insn = ld_code4(s->pc);
4140 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4141 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
4142 tcg_temp_free_i64(tmp);
4143 break;
4144 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4145 insn = ld_code4(s->pc);
4146 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4147 tmp2 = load_reg(r1);
4148 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4149 tcg_temp_free_i64(tmp);
4150 tcg_temp_free_i64(tmp2);
4151 break;
4152 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4153 insn = ld_code4(s->pc);
4154 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4155 tmp2 = tcg_temp_new_i64();
4156 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4157 store_reg8(r1, tmp2);
4158 tcg_temp_free_i64(tmp);
4159 tcg_temp_free_i64(tmp2);
4160 break;
4161 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4162 insn = ld_code4(s->pc);
4163 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4164 tmp2 = load_reg(r1);
4165 tmp3 = tcg_const_i64(s->pc + 4);
4166 update_psw_addr(s);
4167 gen_op_calc_cc(s);
4168 gen_helper_ex(cc_op, cpu_env, cc_op, tmp2, tmp, tmp3);
4169 set_cc_static(s);
4170 tcg_temp_free_i64(tmp);
4171 tcg_temp_free_i64(tmp2);
4172 tcg_temp_free_i64(tmp3);
4173 break;
4174 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4175 insn = ld_code4(s->pc);
4176 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4177 tcg_temp_free_i64(tmp);
4179 tmp32_1 = load_reg32(r1);
4180 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
4181 store_reg32(r1, tmp32_1);
4183 gen_update_cc_op(s);
4184 l1 = gen_new_label();
4185 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
4187 /* not taking the branch, jump to after the instruction */
4188 gen_goto_tb(s, 0, s->pc + 4);
4189 gen_set_label(l1);
4191 /* take the branch, move R2 into psw.addr */
4192 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4193 tcg_gen_mov_i64(psw_addr, tmp);
4194 s->is_jmp = DISAS_JUMP;
4195 tcg_temp_free_i32(tmp32_1);
4196 tcg_temp_free_i64(tmp);
4197 break;
4198 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4199 insn = ld_code4(s->pc);
4200 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4201 gen_bcr(s, r1, tmp, s->pc + 4);
4202 tcg_temp_free_i64(tmp);
4203 s->is_jmp = DISAS_TB_JUMP;
4204 break;
4205 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4206 insn = ld_code4(s->pc);
4207 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4208 tmp2 = tcg_temp_new_i64();
4209 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4210 store_reg32_i64(r1, tmp2);
4211 tcg_temp_free_i64(tmp);
4212 tcg_temp_free_i64(tmp2);
4213 break;
4214 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4215 insn = ld_code4(s->pc);
4216 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4217 tmp32_1 = load_reg32(r1);
4218 tmp32_2 = tcg_temp_new_i32();
4219 tmp2 = tcg_temp_new_i64();
4220 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4221 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4222 cmp_s32(s, tmp32_1, tmp32_2);
4223 tcg_temp_free_i32(tmp32_1);
4224 tcg_temp_free_i32(tmp32_2);
4225 tcg_temp_free_i64(tmp);
4226 tcg_temp_free_i64(tmp2);
4227 break;
4228 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4229 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4230 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4231 insn = ld_code4(s->pc);
4232 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4233 tmp2 = tcg_temp_new_i64();
4234 tmp32_1 = load_reg32(r1);
4235 tmp32_2 = tcg_temp_new_i32();
4236 tmp32_3 = tcg_temp_new_i32();
4238 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4239 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4240 switch (opc) {
4241 case 0x4a:
4242 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4243 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4244 break;
4245 case 0x4b:
4246 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4247 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4248 break;
4249 case 0x4c:
4250 tcg_gen_mul_i32(tmp32_3, tmp32_1, tmp32_2);
4251 break;
4252 default:
4253 tcg_abort();
4255 store_reg32(r1, tmp32_3);
4257 tcg_temp_free_i32(tmp32_1);
4258 tcg_temp_free_i32(tmp32_2);
4259 tcg_temp_free_i32(tmp32_3);
4260 tcg_temp_free_i64(tmp);
4261 tcg_temp_free_i64(tmp2);
4262 break;
4263 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4264 insn = ld_code4(s->pc);
4265 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4266 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
4267 store_reg(r1, tmp2);
4268 tcg_gen_mov_i64(psw_addr, tmp);
4269 tcg_temp_free_i64(tmp);
4270 tcg_temp_free_i64(tmp2);
4271 s->is_jmp = DISAS_JUMP;
4272 break;
4273 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4274 insn = ld_code4(s->pc);
4275 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4276 tmp2 = tcg_temp_new_i64();
4277 tmp32_1 = tcg_temp_new_i32();
4278 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
4279 gen_helper_cvd(tmp2, tmp32_1);
4280 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4281 tcg_temp_free_i64(tmp);
4282 tcg_temp_free_i64(tmp2);
4283 tcg_temp_free_i32(tmp32_1);
4284 break;
4285 case 0x50: /* st r1, d2(x2, b2) */
4286 insn = ld_code4(s->pc);
4287 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4288 tmp2 = load_reg(r1);
4289 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4290 tcg_temp_free_i64(tmp);
4291 tcg_temp_free_i64(tmp2);
4292 break;
4293 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4294 insn = ld_code4(s->pc);
4295 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4296 tmp2 = tcg_temp_new_i64();
4297 tmp32_1 = tcg_temp_new_i32();
4298 tmp32_2 = load_reg32(r1);
4299 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4300 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4301 cmp_u32(s, tmp32_2, tmp32_1);
4302 tcg_temp_free_i64(tmp);
4303 tcg_temp_free_i64(tmp2);
4304 tcg_temp_free_i32(tmp32_1);
4305 tcg_temp_free_i32(tmp32_2);
4306 break;
4307 case 0x54: /* N R1,D2(X2,B2) [RX] */
4308 case 0x56: /* O R1,D2(X2,B2) [RX] */
4309 case 0x57: /* X R1,D2(X2,B2) [RX] */
4310 insn = ld_code4(s->pc);
4311 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4312 tmp2 = tcg_temp_new_i64();
4313 tmp32_1 = load_reg32(r1);
4314 tmp32_2 = tcg_temp_new_i32();
4315 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4316 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4317 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4318 store_reg32(r1, tmp32_1);
4319 set_cc_nz_u32(s, tmp32_1);
4320 tcg_temp_free_i64(tmp);
4321 tcg_temp_free_i64(tmp2);
4322 tcg_temp_free_i32(tmp32_1);
4323 tcg_temp_free_i32(tmp32_2);
4324 break;
4325 case 0x58: /* l r1, d2(x2, b2) */
4326 insn = ld_code4(s->pc);
4327 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4328 tmp2 = tcg_temp_new_i64();
4329 tmp32_1 = tcg_temp_new_i32();
4330 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4331 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4332 store_reg32(r1, tmp32_1);
4333 tcg_temp_free_i64(tmp);
4334 tcg_temp_free_i64(tmp2);
4335 tcg_temp_free_i32(tmp32_1);
4336 break;
4337 case 0x59: /* C R1,D2(X2,B2) [RX] */
4338 insn = ld_code4(s->pc);
4339 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4340 tmp2 = tcg_temp_new_i64();
4341 tmp32_1 = tcg_temp_new_i32();
4342 tmp32_2 = load_reg32(r1);
4343 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4344 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4345 cmp_s32(s, tmp32_2, tmp32_1);
4346 tcg_temp_free_i64(tmp);
4347 tcg_temp_free_i64(tmp2);
4348 tcg_temp_free_i32(tmp32_1);
4349 tcg_temp_free_i32(tmp32_2);
4350 break;
4351 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4352 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4353 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4354 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4355 insn = ld_code4(s->pc);
4356 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4357 tmp32_1 = load_reg32(r1);
4358 tmp32_2 = tcg_temp_new_i32();
4359 tmp32_3 = tcg_temp_new_i32();
4360 tcg_gen_qemu_ld32s(tmp, tmp, get_mem_index(s));
4361 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4362 switch (opc) {
4363 case 0x5a:
4364 case 0x5e:
4365 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4366 break;
4367 case 0x5b:
4368 case 0x5f:
4369 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4370 break;
4371 default:
4372 tcg_abort();
4374 store_reg32(r1, tmp32_3);
4375 switch (opc) {
4376 case 0x5a:
4377 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4378 break;
4379 case 0x5e:
4380 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4381 break;
4382 case 0x5b:
4383 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4384 break;
4385 case 0x5f:
4386 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4387 break;
4388 default:
4389 tcg_abort();
4391 tcg_temp_free_i64(tmp);
4392 tcg_temp_free_i32(tmp32_1);
4393 tcg_temp_free_i32(tmp32_2);
4394 tcg_temp_free_i32(tmp32_3);
4395 break;
4396 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4397 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4398 insn = ld_code4(s->pc);
4399 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4400 tmp2 = tcg_temp_new_i64();
4401 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4402 tmp3 = load_reg((r1 + 1) & 15);
4403 tcg_gen_ext32s_i64(tmp2, tmp2);
4404 tcg_gen_ext32s_i64(tmp3, tmp3);
4405 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4406 store_reg32_i64((r1 + 1) & 15, tmp2);
4407 tcg_gen_shri_i64(tmp2, tmp2, 32);
4408 store_reg32_i64(r1, tmp2);
4409 tcg_temp_free_i64(tmp);
4410 tcg_temp_free_i64(tmp2);
4411 tcg_temp_free_i64(tmp3);
4412 break;
4413 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4414 insn = ld_code4(s->pc);
4415 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4416 tmp32_1 = load_reg32(r1);
4417 tmp32_2 = load_reg32(r1 + 1);
4419 tmp = tcg_temp_new_i64();
4420 tmp2 = tcg_temp_new_i64();
4422 /* dividend is r(r1 << 32) | r(r1 + 1) */
4423 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4424 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4425 tcg_gen_shli_i64(tmp, tmp, 32);
4426 tcg_gen_or_i64(tmp, tmp, tmp2);
4428 /* divisor is in memory */
4429 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
4431 /* XXX divisor == 0 -> FixP divide exception */
4433 tcg_gen_div_i64(tmp3, tmp, tmp2);
4434 tcg_gen_rem_i64(tmp, tmp, tmp2);
4436 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4437 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4439 store_reg32(r1, tmp32_1); /* remainder */
4440 store_reg32(r1 + 1, tmp32_2); /* quotient */
4441 tcg_temp_free_i32(tmp32_1);
4442 tcg_temp_free_i32(tmp32_2);
4443 tcg_temp_free_i64(tmp);
4444 tcg_temp_free_i64(tmp2);
4445 tcg_temp_free_i64(tmp3);
4446 break;
4447 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4448 insn = ld_code4(s->pc);
4449 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4450 tmp2 = load_freg(r1);
4451 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4452 tcg_temp_free_i64(tmp);
4453 tcg_temp_free_i64(tmp2);
4454 break;
4455 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4456 insn = ld_code4(s->pc);
4457 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4458 tmp2 = tcg_temp_new_i64();
4459 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
4460 store_freg(r1, tmp2);
4461 tcg_temp_free_i64(tmp);
4462 tcg_temp_free_i64(tmp2);
4463 break;
4464 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4465 insn = ld_code4(s->pc);
4466 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4467 tmp2 = tcg_temp_new_i64();
4468 tmp32_1 = load_freg32(r1);
4469 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
4470 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4471 tcg_temp_free_i64(tmp);
4472 tcg_temp_free_i64(tmp2);
4473 tcg_temp_free_i32(tmp32_1);
4474 break;
4475 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4476 insn = ld_code4(s->pc);
4477 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4478 tmp2 = tcg_temp_new_i64();
4479 tmp32_1 = load_reg32(r1);
4480 tmp32_2 = tcg_temp_new_i32();
4481 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4482 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4483 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
4484 store_reg32(r1, tmp32_1);
4485 tcg_temp_free_i64(tmp);
4486 tcg_temp_free_i64(tmp2);
4487 tcg_temp_free_i32(tmp32_1);
4488 tcg_temp_free_i32(tmp32_2);
4489 break;
4490 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4491 insn = ld_code4(s->pc);
4492 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4493 tmp2 = tcg_temp_new_i64();
4494 tmp32_1 = tcg_temp_new_i32();
4495 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4496 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4497 store_freg32(r1, tmp32_1);
4498 tcg_temp_free_i64(tmp);
4499 tcg_temp_free_i64(tmp2);
4500 tcg_temp_free_i32(tmp32_1);
4501 break;
4502 #ifndef CONFIG_USER_ONLY
4503 case 0x80: /* SSM D2(B2) [S] */
4504 /* Set System Mask */
4505 check_privileged(s, ilc);
4506 insn = ld_code4(s->pc);
4507 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4508 tmp = get_address(s, 0, b2, d2);
4509 tmp2 = tcg_temp_new_i64();
4510 tmp3 = tcg_temp_new_i64();
4511 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
4512 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4513 tcg_gen_shli_i64(tmp2, tmp2, 56);
4514 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
4515 tcg_temp_free_i64(tmp);
4516 tcg_temp_free_i64(tmp2);
4517 tcg_temp_free_i64(tmp3);
4518 break;
4519 case 0x82: /* LPSW D2(B2) [S] */
4520 /* Load PSW */
4521 check_privileged(s, ilc);
4522 insn = ld_code4(s->pc);
4523 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4524 tmp = get_address(s, 0, b2, d2);
4525 tmp2 = tcg_temp_new_i64();
4526 tmp3 = tcg_temp_new_i64();
4527 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4528 tcg_gen_addi_i64(tmp, tmp, 4);
4529 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
4530 gen_helper_load_psw(cpu_env, tmp2, tmp3);
4531 tcg_temp_free_i64(tmp);
4532 tcg_temp_free_i64(tmp2);
4533 tcg_temp_free_i64(tmp3);
4534 /* we need to keep cc_op intact */
4535 s->is_jmp = DISAS_JUMP;
4536 break;
4537 case 0x83: /* DIAG R1,R3,D2 [RS] */
4538 /* Diagnose call (KVM hypercall) */
4539 check_privileged(s, ilc);
4540 potential_page_fault(s);
4541 insn = ld_code4(s->pc);
4542 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4543 tmp32_1 = tcg_const_i32(insn & 0xfff);
4544 tmp2 = load_reg(2);
4545 tmp3 = load_reg(1);
4546 gen_helper_diag(tmp2, cpu_env, tmp32_1, tmp2, tmp3);
4547 store_reg(2, tmp2);
4548 tcg_temp_free_i32(tmp32_1);
4549 tcg_temp_free_i64(tmp2);
4550 tcg_temp_free_i64(tmp3);
4551 break;
4552 #endif
4553 case 0x88: /* SRL R1,D2(B2) [RS] */
4554 case 0x89: /* SLL R1,D2(B2) [RS] */
4555 case 0x8a: /* SRA R1,D2(B2) [RS] */
4556 insn = ld_code4(s->pc);
4557 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4558 tmp = get_address(s, 0, b2, d2);
4559 tmp32_1 = load_reg32(r1);
4560 tmp32_2 = tcg_temp_new_i32();
4561 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4562 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4563 switch (opc) {
4564 case 0x88:
4565 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4566 break;
4567 case 0x89:
4568 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4569 break;
4570 case 0x8a:
4571 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4572 set_cc_s32(s, tmp32_1);
4573 break;
4574 default:
4575 tcg_abort();
4577 store_reg32(r1, tmp32_1);
4578 tcg_temp_free_i64(tmp);
4579 tcg_temp_free_i32(tmp32_1);
4580 tcg_temp_free_i32(tmp32_2);
4581 break;
4582 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4583 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4584 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4585 insn = ld_code4(s->pc);
4586 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4587 tmp = get_address(s, 0, b2, d2); /* shift */
4588 tmp2 = tcg_temp_new_i64();
4589 tmp32_1 = load_reg32(r1);
4590 tmp32_2 = load_reg32(r1 + 1);
4591 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4592 switch (opc) {
4593 case 0x8c:
4594 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4595 break;
4596 case 0x8d:
4597 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4598 break;
4599 case 0x8e:
4600 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4601 set_cc_s64(s, tmp2);
4602 break;
4604 tcg_gen_shri_i64(tmp, tmp2, 32);
4605 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4606 store_reg32(r1, tmp32_1);
4607 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4608 store_reg32(r1 + 1, tmp32_2);
4609 tcg_temp_free_i64(tmp);
4610 tcg_temp_free_i64(tmp2);
4611 break;
4612 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4613 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4614 insn = ld_code4(s->pc);
4615 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4617 tmp = get_address(s, 0, b2, d2);
4618 tmp2 = tcg_temp_new_i64();
4619 tmp3 = tcg_const_i64(4);
4620 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
4621 for (i = r1;; i = (i + 1) % 16) {
4622 if (opc == 0x98) {
4623 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4624 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4625 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4626 } else {
4627 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4629 if (i == r3) {
4630 break;
4632 tcg_gen_add_i64(tmp, tmp, tmp3);
4634 tcg_temp_free_i64(tmp);
4635 tcg_temp_free_i64(tmp2);
4636 tcg_temp_free_i64(tmp3);
4637 tcg_temp_free_i64(tmp4);
4638 break;
4639 case 0x91: /* TM D1(B1),I2 [SI] */
4640 insn = ld_code4(s->pc);
4641 tmp = decode_si(s, insn, &i2, &b1, &d1);
4642 tmp2 = tcg_const_i64(i2);
4643 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4644 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4645 tcg_temp_free_i64(tmp);
4646 tcg_temp_free_i64(tmp2);
4647 break;
4648 case 0x92: /* MVI D1(B1),I2 [SI] */
4649 insn = ld_code4(s->pc);
4650 tmp = decode_si(s, insn, &i2, &b1, &d1);
4651 tmp2 = tcg_const_i64(i2);
4652 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4653 tcg_temp_free_i64(tmp);
4654 tcg_temp_free_i64(tmp2);
4655 break;
4656 case 0x94: /* NI D1(B1),I2 [SI] */
4657 case 0x96: /* OI D1(B1),I2 [SI] */
4658 case 0x97: /* XI D1(B1),I2 [SI] */
4659 insn = ld_code4(s->pc);
4660 tmp = decode_si(s, insn, &i2, &b1, &d1);
4661 tmp2 = tcg_temp_new_i64();
4662 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4663 switch (opc) {
4664 case 0x94:
4665 tcg_gen_andi_i64(tmp2, tmp2, i2);
4666 break;
4667 case 0x96:
4668 tcg_gen_ori_i64(tmp2, tmp2, i2);
4669 break;
4670 case 0x97:
4671 tcg_gen_xori_i64(tmp2, tmp2, i2);
4672 break;
4673 default:
4674 tcg_abort();
4676 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4677 set_cc_nz_u64(s, tmp2);
4678 tcg_temp_free_i64(tmp);
4679 tcg_temp_free_i64(tmp2);
4680 break;
4681 case 0x95: /* CLI D1(B1),I2 [SI] */
4682 insn = ld_code4(s->pc);
4683 tmp = decode_si(s, insn, &i2, &b1, &d1);
4684 tmp2 = tcg_temp_new_i64();
4685 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4686 cmp_u64c(s, tmp2, i2);
4687 tcg_temp_free_i64(tmp);
4688 tcg_temp_free_i64(tmp2);
4689 break;
4690 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4691 insn = ld_code4(s->pc);
4692 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4693 tmp = get_address(s, 0, b2, d2);
4694 tmp32_1 = tcg_const_i32(r1);
4695 tmp32_2 = tcg_const_i32(r3);
4696 potential_page_fault(s);
4697 gen_helper_lam(cpu_env, tmp32_1, tmp, tmp32_2);
4698 tcg_temp_free_i64(tmp);
4699 tcg_temp_free_i32(tmp32_1);
4700 tcg_temp_free_i32(tmp32_2);
4701 break;
4702 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4703 insn = ld_code4(s->pc);
4704 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4705 tmp = get_address(s, 0, b2, d2);
4706 tmp32_1 = tcg_const_i32(r1);
4707 tmp32_2 = tcg_const_i32(r3);
4708 potential_page_fault(s);
4709 gen_helper_stam(cpu_env, tmp32_1, tmp, tmp32_2);
4710 tcg_temp_free_i64(tmp);
4711 tcg_temp_free_i32(tmp32_1);
4712 tcg_temp_free_i32(tmp32_2);
4713 break;
4714 case 0xa5:
4715 insn = ld_code4(s->pc);
4716 r1 = (insn >> 20) & 0xf;
4717 op = (insn >> 16) & 0xf;
4718 i2 = insn & 0xffff;
4719 disas_a5(s, op, r1, i2);
4720 break;
4721 case 0xa7:
4722 insn = ld_code4(s->pc);
4723 r1 = (insn >> 20) & 0xf;
4724 op = (insn >> 16) & 0xf;
4725 i2 = (short)insn;
4726 disas_a7(s, op, r1, i2);
4727 break;
4728 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4729 insn = ld_code4(s->pc);
4730 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4731 tmp = get_address(s, 0, b2, d2);
4732 tmp32_1 = tcg_const_i32(r1);
4733 tmp32_2 = tcg_const_i32(r3);
4734 potential_page_fault(s);
4735 gen_helper_mvcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4736 set_cc_static(s);
4737 tcg_temp_free_i64(tmp);
4738 tcg_temp_free_i32(tmp32_1);
4739 tcg_temp_free_i32(tmp32_2);
4740 break;
4741 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4742 insn = ld_code4(s->pc);
4743 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4744 tmp = get_address(s, 0, b2, d2);
4745 tmp32_1 = tcg_const_i32(r1);
4746 tmp32_2 = tcg_const_i32(r3);
4747 potential_page_fault(s);
4748 gen_helper_clcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4749 set_cc_static(s);
4750 tcg_temp_free_i64(tmp);
4751 tcg_temp_free_i32(tmp32_1);
4752 tcg_temp_free_i32(tmp32_2);
4753 break;
4754 #ifndef CONFIG_USER_ONLY
4755 case 0xac: /* STNSM D1(B1),I2 [SI] */
4756 case 0xad: /* STOSM D1(B1),I2 [SI] */
4757 check_privileged(s, ilc);
4758 insn = ld_code4(s->pc);
4759 tmp = decode_si(s, insn, &i2, &b1, &d1);
4760 tmp2 = tcg_temp_new_i64();
4761 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4762 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4763 if (opc == 0xac) {
4764 tcg_gen_andi_i64(psw_mask, psw_mask,
4765 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4766 } else {
4767 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4769 tcg_temp_free_i64(tmp);
4770 tcg_temp_free_i64(tmp2);
4771 break;
4772 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4773 check_privileged(s, ilc);
4774 insn = ld_code4(s->pc);
4775 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4776 tmp = get_address(s, 0, b2, d2);
4777 tmp2 = load_reg(r3);
4778 tmp32_1 = tcg_const_i32(r1);
4779 potential_page_fault(s);
4780 gen_helper_sigp(cc_op, cpu_env, tmp, tmp32_1, tmp2);
4781 set_cc_static(s);
4782 tcg_temp_free_i64(tmp);
4783 tcg_temp_free_i64(tmp2);
4784 tcg_temp_free_i32(tmp32_1);
4785 break;
4786 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4787 check_privileged(s, ilc);
4788 insn = ld_code4(s->pc);
4789 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4790 tmp32_1 = tcg_const_i32(r1);
4791 potential_page_fault(s);
4792 gen_helper_lra(cc_op, cpu_env, tmp, tmp32_1);
4793 set_cc_static(s);
4794 tcg_temp_free_i64(tmp);
4795 tcg_temp_free_i32(tmp32_1);
4796 break;
4797 #endif
4798 case 0xb2:
4799 insn = ld_code4(s->pc);
4800 op = (insn >> 16) & 0xff;
4801 switch (op) {
4802 case 0x9c: /* STFPC D2(B2) [S] */
4803 d2 = insn & 0xfff;
4804 b2 = (insn >> 12) & 0xf;
4805 tmp32_1 = tcg_temp_new_i32();
4806 tmp = tcg_temp_new_i64();
4807 tmp2 = get_address(s, 0, b2, d2);
4808 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
4809 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4810 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4811 tcg_temp_free_i32(tmp32_1);
4812 tcg_temp_free_i64(tmp);
4813 tcg_temp_free_i64(tmp2);
4814 break;
4815 default:
4816 disas_b2(s, op, insn);
4817 break;
4819 break;
4820 case 0xb3:
4821 insn = ld_code4(s->pc);
4822 op = (insn >> 16) & 0xff;
4823 r3 = (insn >> 12) & 0xf; /* aka m3 */
4824 r1 = (insn >> 4) & 0xf;
4825 r2 = insn & 0xf;
4826 disas_b3(s, op, r3, r1, r2);
4827 break;
4828 #ifndef CONFIG_USER_ONLY
4829 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4830 /* Store Control */
4831 check_privileged(s, ilc);
4832 insn = ld_code4(s->pc);
4833 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4834 tmp = get_address(s, 0, b2, d2);
4835 tmp32_1 = tcg_const_i32(r1);
4836 tmp32_2 = tcg_const_i32(r3);
4837 potential_page_fault(s);
4838 gen_helper_stctl(cpu_env, tmp32_1, tmp, tmp32_2);
4839 tcg_temp_free_i64(tmp);
4840 tcg_temp_free_i32(tmp32_1);
4841 tcg_temp_free_i32(tmp32_2);
4842 break;
4843 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4844 /* Load Control */
4845 check_privileged(s, ilc);
4846 insn = ld_code4(s->pc);
4847 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4848 tmp = get_address(s, 0, b2, d2);
4849 tmp32_1 = tcg_const_i32(r1);
4850 tmp32_2 = tcg_const_i32(r3);
4851 potential_page_fault(s);
4852 gen_helper_lctl(cpu_env, tmp32_1, tmp, tmp32_2);
4853 tcg_temp_free_i64(tmp);
4854 tcg_temp_free_i32(tmp32_1);
4855 tcg_temp_free_i32(tmp32_2);
4856 break;
4857 #endif
4858 case 0xb9:
4859 insn = ld_code4(s->pc);
4860 r1 = (insn >> 4) & 0xf;
4861 r2 = insn & 0xf;
4862 op = (insn >> 16) & 0xff;
4863 disas_b9(s, op, r1, r2);
4864 break;
4865 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4866 insn = ld_code4(s->pc);
4867 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4868 tmp = get_address(s, 0, b2, d2);
4869 tmp32_1 = tcg_const_i32(r1);
4870 tmp32_2 = tcg_const_i32(r3);
4871 potential_page_fault(s);
4872 gen_helper_cs(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4873 set_cc_static(s);
4874 tcg_temp_free_i64(tmp);
4875 tcg_temp_free_i32(tmp32_1);
4876 tcg_temp_free_i32(tmp32_2);
4877 break;
4878 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4879 insn = ld_code4(s->pc);
4880 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4881 tmp = get_address(s, 0, b2, d2);
4882 tmp32_1 = load_reg32(r1);
4883 tmp32_2 = tcg_const_i32(r3);
4884 potential_page_fault(s);
4885 gen_helper_clm(cc_op, cpu_env, tmp32_1, tmp32_2, tmp);
4886 set_cc_static(s);
4887 tcg_temp_free_i64(tmp);
4888 tcg_temp_free_i32(tmp32_1);
4889 tcg_temp_free_i32(tmp32_2);
4890 break;
4891 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4892 insn = ld_code4(s->pc);
4893 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4894 tmp = get_address(s, 0, b2, d2);
4895 tmp32_1 = load_reg32(r1);
4896 tmp32_2 = tcg_const_i32(r3);
4897 potential_page_fault(s);
4898 gen_helper_stcm(cpu_env, tmp32_1, tmp32_2, tmp);
4899 tcg_temp_free_i64(tmp);
4900 tcg_temp_free_i32(tmp32_1);
4901 tcg_temp_free_i32(tmp32_2);
4902 break;
4903 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4904 insn = ld_code4(s->pc);
4905 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4906 if (r3 == 15) {
4907 /* effectively a 32-bit load */
4908 tmp = get_address(s, 0, b2, d2);
4909 tmp32_1 = tcg_temp_new_i32();
4910 tmp32_2 = tcg_const_i32(r3);
4911 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4912 store_reg32_i64(r1, tmp);
4913 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4914 set_cc_icm(s, tmp32_2, tmp32_1);
4915 tcg_temp_free_i64(tmp);
4916 tcg_temp_free_i32(tmp32_1);
4917 tcg_temp_free_i32(tmp32_2);
4918 } else if (r3) {
4919 uint32_t mask = 0x00ffffffUL;
4920 uint32_t shift = 24;
4921 int m3 = r3;
4922 tmp = get_address(s, 0, b2, d2);
4923 tmp2 = tcg_temp_new_i64();
4924 tmp32_1 = load_reg32(r1);
4925 tmp32_2 = tcg_temp_new_i32();
4926 tmp32_3 = tcg_const_i32(r3);
4927 tmp32_4 = tcg_const_i32(0);
4928 while (m3) {
4929 if (m3 & 8) {
4930 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4931 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4932 if (shift) {
4933 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4935 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4936 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4937 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4938 tcg_gen_addi_i64(tmp, tmp, 1);
4940 m3 = (m3 << 1) & 0xf;
4941 mask = (mask >> 8) | 0xff000000UL;
4942 shift -= 8;
4944 store_reg32(r1, tmp32_1);
4945 set_cc_icm(s, tmp32_3, tmp32_4);
4946 tcg_temp_free_i64(tmp);
4947 tcg_temp_free_i64(tmp2);
4948 tcg_temp_free_i32(tmp32_1);
4949 tcg_temp_free_i32(tmp32_2);
4950 tcg_temp_free_i32(tmp32_3);
4951 tcg_temp_free_i32(tmp32_4);
4952 } else {
4953 /* i.e. env->cc = 0 */
4954 gen_op_movi_cc(s, 0);
4956 break;
4957 case 0xc0:
4958 case 0xc2:
4959 insn = ld_code6(s->pc);
4960 r1 = (insn >> 36) & 0xf;
4961 op = (insn >> 32) & 0xf;
4962 i2 = (int)insn;
4963 switch (opc) {
4964 case 0xc0:
4965 disas_c0(s, op, r1, i2);
4966 break;
4967 case 0xc2:
4968 disas_c2(s, op, r1, i2);
4969 break;
4970 default:
4971 tcg_abort();
4973 break;
4974 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4975 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4976 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4977 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4978 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4979 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4980 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4981 insn = ld_code6(s->pc);
4982 vl = tcg_const_i32((insn >> 32) & 0xff);
4983 b1 = (insn >> 28) & 0xf;
4984 b2 = (insn >> 12) & 0xf;
4985 d1 = (insn >> 16) & 0xfff;
4986 d2 = insn & 0xfff;
4987 tmp = get_address(s, 0, b1, d1);
4988 tmp2 = get_address(s, 0, b2, d2);
4989 switch (opc) {
4990 case 0xd2:
4991 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
4992 break;
4993 case 0xd4:
4994 potential_page_fault(s);
4995 gen_helper_nc(cc_op, cpu_env, vl, tmp, tmp2);
4996 set_cc_static(s);
4997 break;
4998 case 0xd5:
4999 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
5000 break;
5001 case 0xd6:
5002 potential_page_fault(s);
5003 gen_helper_oc(cc_op, cpu_env, vl, tmp, tmp2);
5004 set_cc_static(s);
5005 break;
5006 case 0xd7:
5007 potential_page_fault(s);
5008 gen_helper_xc(cc_op, cpu_env, vl, tmp, tmp2);
5009 set_cc_static(s);
5010 break;
5011 case 0xdc:
5012 potential_page_fault(s);
5013 gen_helper_tr(cpu_env, vl, tmp, tmp2);
5014 set_cc_static(s);
5015 break;
5016 case 0xf3:
5017 potential_page_fault(s);
5018 gen_helper_unpk(cpu_env, vl, tmp, tmp2);
5019 break;
5020 default:
5021 tcg_abort();
5023 tcg_temp_free_i64(tmp);
5024 tcg_temp_free_i64(tmp2);
5025 break;
5026 #ifndef CONFIG_USER_ONLY
5027 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5028 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5029 check_privileged(s, ilc);
5030 potential_page_fault(s);
5031 insn = ld_code6(s->pc);
5032 r1 = (insn >> 36) & 0xf;
5033 r3 = (insn >> 32) & 0xf;
5034 b1 = (insn >> 28) & 0xf;
5035 d1 = (insn >> 16) & 0xfff;
5036 b2 = (insn >> 12) & 0xf;
5037 d2 = insn & 0xfff;
5038 tmp = load_reg(r1);
5039 /* XXX key in r3 */
5040 tmp2 = get_address(s, 0, b1, d1);
5041 tmp3 = get_address(s, 0, b2, d2);
5042 if (opc == 0xda) {
5043 gen_helper_mvcp(cc_op, cpu_env, tmp, tmp2, tmp3);
5044 } else {
5045 gen_helper_mvcs(cc_op, cpu_env, tmp, tmp2, tmp3);
5047 set_cc_static(s);
5048 tcg_temp_free_i64(tmp);
5049 tcg_temp_free_i64(tmp2);
5050 tcg_temp_free_i64(tmp3);
5051 break;
5052 #endif
5053 case 0xe3:
5054 insn = ld_code6(s->pc);
5055 debug_insn(insn);
5056 op = insn & 0xff;
5057 r1 = (insn >> 36) & 0xf;
5058 x2 = (insn >> 32) & 0xf;
5059 b2 = (insn >> 28) & 0xf;
5060 d2 = ((int)((((insn >> 16) & 0xfff)
5061 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5062 disas_e3(s, op, r1, x2, b2, d2 );
5063 break;
5064 #ifndef CONFIG_USER_ONLY
5065 case 0xe5:
5066 /* Test Protection */
5067 check_privileged(s, ilc);
5068 insn = ld_code6(s->pc);
5069 debug_insn(insn);
5070 disas_e5(s, insn);
5071 break;
5072 #endif
5073 case 0xeb:
5074 insn = ld_code6(s->pc);
5075 debug_insn(insn);
5076 op = insn & 0xff;
5077 r1 = (insn >> 36) & 0xf;
5078 r3 = (insn >> 32) & 0xf;
5079 b2 = (insn >> 28) & 0xf;
5080 d2 = ((int)((((insn >> 16) & 0xfff)
5081 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5082 disas_eb(s, op, r1, r3, b2, d2);
5083 break;
5084 case 0xed:
5085 insn = ld_code6(s->pc);
5086 debug_insn(insn);
5087 op = insn & 0xff;
5088 r1 = (insn >> 36) & 0xf;
5089 x2 = (insn >> 32) & 0xf;
5090 b2 = (insn >> 28) & 0xf;
5091 d2 = (short)((insn >> 16) & 0xfff);
5092 r1b = (insn >> 12) & 0xf;
5093 disas_ed(s, op, r1, x2, b2, d2, r1b);
5094 break;
5095 default:
5096 qemu_log_mask(LOG_UNIMP, "unimplemented opcode 0x%x\n", opc);
5097 gen_illegal_opcode(s, ilc);
5098 break;
5101 /* Instruction length is encoded in the opcode */
5102 s->pc += (ilc * 2);
5105 static inline void gen_intermediate_code_internal(CPUS390XState *env,
5106 TranslationBlock *tb,
5107 int search_pc)
5109 DisasContext dc;
5110 target_ulong pc_start;
5111 uint64_t next_page_start;
5112 uint16_t *gen_opc_end;
5113 int j, lj = -1;
5114 int num_insns, max_insns;
5115 CPUBreakpoint *bp;
5117 pc_start = tb->pc;
5119 /* 31-bit mode */
5120 if (!(tb->flags & FLAG_MASK_64)) {
5121 pc_start &= 0x7fffffff;
5124 dc.pc = pc_start;
5125 dc.is_jmp = DISAS_NEXT;
5126 dc.tb = tb;
5127 dc.cc_op = CC_OP_DYNAMIC;
5129 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5131 next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
5133 num_insns = 0;
5134 max_insns = tb->cflags & CF_COUNT_MASK;
5135 if (max_insns == 0) {
5136 max_insns = CF_COUNT_MASK;
5139 gen_icount_start();
5141 do {
5142 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5143 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5144 if (bp->pc == dc.pc) {
5145 gen_debug(&dc);
5146 break;
5150 if (search_pc) {
5151 j = gen_opc_ptr - gen_opc_buf;
5152 if (lj < j) {
5153 lj++;
5154 while (lj < j) {
5155 gen_opc_instr_start[lj++] = 0;
5158 gen_opc_pc[lj] = dc.pc;
5159 gen_opc_cc_op[lj] = dc.cc_op;
5160 gen_opc_instr_start[lj] = 1;
5161 gen_opc_icount[lj] = num_insns;
5163 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
5164 gen_io_start();
5166 #if defined(S390X_DEBUG_DISAS_VERBOSE)
5167 LOG_DISAS("pc " TARGET_FMT_lx "\n",
5168 dc.pc);
5169 #endif
5170 disas_s390_insn(&dc);
5172 num_insns++;
5173 if (env->singlestep_enabled) {
5174 gen_debug(&dc);
5176 } while (!dc.is_jmp && gen_opc_ptr < gen_opc_end && dc.pc < next_page_start
5177 && num_insns < max_insns && !env->singlestep_enabled
5178 && !singlestep);
5180 if (!dc.is_jmp) {
5181 update_psw_addr(&dc);
5184 if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
5185 gen_op_calc_cc(&dc);
5186 } else {
5187 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5188 is in env */
5189 gen_op_set_cc_op(&dc);
5192 if (tb->cflags & CF_LAST_IO) {
5193 gen_io_end();
5195 /* Generate the return instruction */
5196 if (dc.is_jmp != DISAS_TB_JUMP) {
5197 tcg_gen_exit_tb(0);
5199 gen_icount_end(tb, num_insns);
5200 *gen_opc_ptr = INDEX_op_end;
5201 if (search_pc) {
5202 j = gen_opc_ptr - gen_opc_buf;
5203 lj++;
5204 while (lj <= j) {
5205 gen_opc_instr_start[lj++] = 0;
5207 } else {
5208 tb->size = dc.pc - pc_start;
5209 tb->icount = num_insns;
5211 #if defined(S390X_DEBUG_DISAS)
5212 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
5213 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5214 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5215 log_target_disas(pc_start, dc.pc - pc_start, 1);
5216 qemu_log("\n");
5218 #endif
5221 void gen_intermediate_code (CPUS390XState *env, struct TranslationBlock *tb)
5223 gen_intermediate_code_internal(env, tb, 0);
5226 void gen_intermediate_code_pc (CPUS390XState *env, struct TranslationBlock *tb)
5228 gen_intermediate_code_internal(env, tb, 1);
5231 void restore_state_to_opc(CPUS390XState *env, TranslationBlock *tb, int pc_pos)
5233 int cc_op;
5234 env->psw.addr = gen_opc_pc[pc_pos];
5235 cc_op = gen_opc_cc_op[pc_pos];
5236 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
5237 env->cc_op = cc_op;