Merge commit 'afb63ebd0a9599312c27ecceb839a399740e00ef' into upstream-merge
[qemu-kvm.git] / target-s390x / translate.c
blobdb464cc073a8a0be70382b46258f1696f7d0e1ce
1 /*
2 * S/390 translation
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
22 /* #define DEBUG_INLINE_BRANCHES */
23 #define S390X_DEBUG_DISAS
24 /* #define S390X_DEBUG_DISAS_VERBOSE */
26 #ifdef S390X_DEBUG_DISAS_VERBOSE
27 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
28 #else
29 # define LOG_DISAS(...) do { } while (0)
30 #endif
32 #include "cpu.h"
33 #include "disas.h"
34 #include "tcg-op.h"
35 #include "qemu-log.h"
37 /* global register indexes */
38 static TCGv_ptr cpu_env;
40 #include "gen-icount.h"
41 #include "helper.h"
42 #define GEN_HELPER 1
43 #include "helper.h"
/* Per-translation-block decoder state. */
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;                 /* guest address of the insn being decoded */
    int is_jmp;                  /* DISAS_* disposition for ending the TB */
    enum cc_op cc_op;            /* how the condition code is currently tracked */
    struct TranslationBlock *tb; /* TB under translation */
};

/* is_jmp value: an exception was raised, translation must stop here */
#define DISAS_EXCP 4
/* Forward declaration: materialize the lazily-tracked cc into cc_op. */
static void gen_op_calc_cc(DisasContext *s);

#ifdef DEBUG_INLINE_BRANCHES
/* Per-CC_OP statistics: branches folded to inline compares vs. branches
   that fell back to the generic cc computation.  */
static uint64_t inline_branch_hit[CC_OP_MAX];
static uint64_t inline_branch_miss[CC_OP_MAX];
#endif
/* Trace a fetched instruction image when verbose disas logging is enabled. */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
67 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
69 if (!(s->tb->flags & FLAG_MASK_64)) {
70 if (s->tb->flags & FLAG_MASK_32) {
71 return pc | 0x80000000;
74 return pc;
/* Dump the PSW, general registers, fp registers and (softmmu only)
 * control registers to @f, four registers per line.
 */
void cpu_dump_state(CPUS390XState *env, FILE *f, fprintf_function cpu_fprintf,
                    int flags)
{
    int i;

    /* cc values 0..3 are architectural; anything above is a lazy CC_OP_* */
    if (env->cc_op > 3) {
        cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
                    env->psw.mask, env->psw.addr, cc_name(env->cc_op));
    } else {
        cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
                    env->psw.mask, env->psw.addr, env->cc_op);
    }

    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
        if ((i % 4) == 3) {   /* four registers per line */
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }

    for (i = 0; i < 16; i++) {
        /* print the raw 64-bit image of the fp register */
        cpu_fprintf(f, "F%02d=%016" PRIx64, i, *(uint64_t *)&env->fregs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }

#ifndef CONFIG_USER_ONLY
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }
#endif

#ifdef DEBUG_INLINE_BRANCHES
    for (i = 0; i < CC_OP_MAX; i++) {
        cpu_fprintf(f, "  %15s = %10ld\t%10ld\n", cc_name(i),
                    inline_branch_miss[i], inline_branch_hit[i]);
    }
#endif

    cpu_fprintf(f, "\n");
}
/* TCG globals mapped onto the architectural state in CPUS390XState. */
static TCGv_i64 psw_addr;
static TCGv_i64 psw_mask;

/* Lazy condition-code tracking: the operation plus up to three operands. */
static TCGv_i32 cc_op;
static TCGv_i64 cc_src;
static TCGv_i64 cc_dst;
static TCGv_i64 cc_vr;

/* Backing storage for the register names handed to TCG:
   "r0".."r9" take 3 bytes each, "r10".."r15" take 4 (incl. NUL).  */
static char cpu_reg_names[10*3 + 6*4];
static TCGv_i64 regs[16];

/* cc_op recorded per generated op, used to restore state on a fault. */
static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
/* Create the TCG globals that mirror the architectural CPU state.
 * Called once at startup before any translation happens.
 */
void s390x_translate_init(void)
{
    int i;
    size_t cpu_reg_names_size = sizeof(cpu_reg_names);
    char *p;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    psw_addr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, psw.addr),
                                      "psw_addr");
    psw_mask = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, psw.mask),
                                      "psw_mask");

    cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUS390XState, cc_op),
                                   "cc_op");
    cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_src),
                                    "cc_src");
    cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_dst),
                                    "cc_dst");
    cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_vr),
                                   "cc_vr");

    p = cpu_reg_names;
    for (i = 0; i < 16; i++) {
        /* TCG keeps only the pointer, so each name must persist;
           carve them out of the static cpu_reg_names buffer.  */
        snprintf(p, cpu_reg_names_size, "r%d", i);
        regs[i] = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUS390XState, regs[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
    }
}
173 static inline TCGv_i64 load_reg(int reg)
175 TCGv_i64 r = tcg_temp_new_i64();
176 tcg_gen_mov_i64(r, regs[reg]);
177 return r;
180 static inline TCGv_i64 load_freg(int reg)
182 TCGv_i64 r = tcg_temp_new_i64();
183 tcg_gen_ld_i64(r, cpu_env, offsetof(CPUS390XState, fregs[reg].d));
184 return r;
187 static inline TCGv_i32 load_freg32(int reg)
189 TCGv_i32 r = tcg_temp_new_i32();
190 tcg_gen_ld_i32(r, cpu_env, offsetof(CPUS390XState, fregs[reg].l.upper));
191 return r;
194 static inline TCGv_i32 load_reg32(int reg)
196 TCGv_i32 r = tcg_temp_new_i32();
197 tcg_gen_trunc_i64_i32(r, regs[reg]);
198 return r;
201 static inline TCGv_i64 load_reg32_i64(int reg)
203 TCGv_i64 r = tcg_temp_new_i64();
204 tcg_gen_ext32s_i64(r, regs[reg]);
205 return r;
208 static inline void store_reg(int reg, TCGv_i64 v)
210 tcg_gen_mov_i64(regs[reg], v);
213 static inline void store_freg(int reg, TCGv_i64 v)
215 tcg_gen_st_i64(v, cpu_env, offsetof(CPUS390XState, fregs[reg].d));
218 static inline void store_reg32(int reg, TCGv_i32 v)
220 #if HOST_LONG_BITS == 32
221 tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
222 #else
223 TCGv_i64 tmp = tcg_temp_new_i64();
224 tcg_gen_extu_i32_i64(tmp, v);
225 /* 32 bit register writes keep the upper half */
226 tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 32);
227 tcg_temp_free_i64(tmp);
228 #endif
231 static inline void store_reg32_i64(int reg, TCGv_i64 v)
233 /* 32 bit register writes keep the upper half */
234 #if HOST_LONG_BITS == 32
235 tcg_gen_mov_i32(TCGV_LOW(regs[reg]), TCGV_LOW(v));
236 #else
237 tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
238 #endif
241 static inline void store_reg16(int reg, TCGv_i32 v)
243 TCGv_i64 tmp = tcg_temp_new_i64();
244 tcg_gen_extu_i32_i64(tmp, v);
245 /* 16 bit register writes keep the upper bytes */
246 tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 16);
247 tcg_temp_free_i64(tmp);
250 static inline void store_reg8(int reg, TCGv_i64 v)
252 /* 8 bit register writes keep the upper bytes */
253 tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
256 static inline void store_freg32(int reg, TCGv_i32 v)
258 tcg_gen_st_i32(v, cpu_env, offsetof(CPUS390XState, fregs[reg].l.upper));
/* Flush the translation-time pc into the architectural PSW address. */
static inline void update_psw_addr(DisasContext *s)
{
    /* psw.addr */
    tcg_gen_movi_i64(psw_addr, s->pc);
}

/* Before emitting an op that may fault (softmmu only), make the PSW
 * address and the condition code architecturally current, so the
 * exception state seen by the guest is precise.
 */
static inline void potential_page_fault(DisasContext *s)
{
#ifndef CONFIG_USER_ONLY
    update_psw_addr(s);
    gen_op_calc_cc(s);
#endif
}

/* Fetch a 2-byte instruction image from guest code at @pc. */
static inline uint64_t ld_code2(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)cpu_lduw_code(env, pc);
}

/* Fetch a 4-byte instruction image from guest code at @pc. */
static inline uint64_t ld_code4(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)cpu_ldl_code(env, pc);
}

/* Fetch a 6-byte instruction image: the leading halfword lands in
 * bits 47..32 and the remaining 4 bytes in the low 32 bits.
 */
static inline uint64_t ld_code6(CPUS390XState *env, uint64_t pc)
{
    uint64_t opc;
    opc = (uint64_t)cpu_lduw_code(env, pc) << 32;
    opc |= (uint64_t)(uint32_t)cpu_ldl_code(env, pc + 2);
    return opc;
}
/* Map the PSW address-space-control bits recorded in the TB flags to
 * the TCG MMU index used for memory accesses.
 */
static inline int get_mem_index(DisasContext *s)
{
    switch (s->tb->flags & FLAG_MASK_ASC) {
    case PSW_ASC_PRIMARY >> 32:
        return 0;
    case PSW_ASC_SECONDARY >> 32:
        return 1;
    case PSW_ASC_HOME >> 32:
        return 2;
    default:
        /* no other ASC value is representable in the TB flags */
        tcg_abort();
        break;
    }
}
/* Raise EXCP_DEBUG with precise PSW/cc state; marks the TB as ended. */
static inline void gen_debug(DisasContext *s)
{
    TCGv_i32 tmp = tcg_const_i32(EXCP_DEBUG);
    update_psw_addr(s);
    gen_op_calc_cc(s);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
    s->is_jmp = DISAS_EXCP;
}
318 #ifdef CONFIG_USER_ONLY
/* User-mode variant: raise EXCP_SPEC directly with precise PSW/cc.
 * @ilc is unused here; kept so the signature matches the softmmu variant.
 */
static void gen_illegal_opcode(CPUS390XState *env, DisasContext *s, int ilc)
{
    TCGv_i32 tmp = tcg_const_i32(EXCP_SPEC);
    update_psw_addr(s);
    gen_op_calc_cc(s);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
    s->is_jmp = DISAS_EXCP;
}
330 #else /* CONFIG_USER_ONLY */
/* Debug aid: re-fetch and print the offending instruction.  Compiles to
 * an empty function unless DEBUG_ILLEGAL_INSTRUCTIONS is defined.
 */
static void debug_print_inst(CPUS390XState *env, DisasContext *s, int ilc)
{
#ifdef DEBUG_ILLEGAL_INSTRUCTIONS
    uint64_t inst = 0;

    /* ilc counts halfwords: 1/2/3 -> a 2/4/6 byte opcode */
    switch (ilc & 3) {
    case 1:
        inst = ld_code2(env, s->pc);
        break;
    case 2:
        inst = ld_code4(env, s->pc);
        break;
    case 3:
        inst = ld_code6(env, s->pc);
        break;
    }

    fprintf(stderr, "Illegal instruction [%d at %016" PRIx64 "]: 0x%016"
            PRIx64 "\n", ilc, s->pc, inst);
#endif
}
/* Raise program exception @code for an instruction of length @ilc
 * halfwords: store the code/ilc into env, advance the PSW past the
 * instruction, make cc precise, then trap to EXCP_PGM.  Ends the TB.
 */
static void gen_program_exception(CPUS390XState *env, DisasContext *s, int ilc,
                                  int code)
{
    TCGv_i32 tmp;

    debug_print_inst(env, s, ilc);

    /* remember what pgm exception this was */
    tmp = tcg_const_i32(code);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_code));
    tcg_temp_free_i32(tmp);

    tmp = tcg_const_i32(ilc);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_ilc));
    tcg_temp_free_i32(tmp);

    /* advance past instruction */
    s->pc += (ilc * 2);
    update_psw_addr(s);

    /* save off cc */
    gen_op_calc_cc(s);

    /* trigger exception */
    tmp = tcg_const_i32(EXCP_PGM);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);

    /* end TB here */
    s->is_jmp = DISAS_EXCP;
}
/* Softmmu variant: illegal opcode is a specification program exception. */
static void gen_illegal_opcode(CPUS390XState *env, DisasContext *s, int ilc)
{
    gen_program_exception(env, s, ilc, PGM_SPECIFICATION);
}

/* Privileged-operation program exception. */
static void gen_privileged_exception(CPUS390XState *env, DisasContext *s,
                                     int ilc)
{
    gen_program_exception(env, s, ilc, PGM_PRIVILEGED);
}

/* Raise a privileged-operation exception when the TB was translated
 * in problem state (PSW PSTATE bit set in the TB flags).
 */
static void check_privileged(CPUS390XState *env, DisasContext *s, int ilc)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_privileged_exception(env, s, ilc);
    }
}
405 #endif /* CONFIG_USER_ONLY */
/* Compute an effective address from index register @x2, base register
 * @b2 and displacement @d2 (register number 0 means "no register").
 * Returns a freshly allocated temp; the caller frees it.  In 31-bit
 * mode the result is masked to 31 bits.
 */
static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
{
    TCGv_i64 tmp;

    /* 31-bitify the immediate part; register contents are dealt with below */
    if (!(s->tb->flags & FLAG_MASK_64)) {
        d2 &= 0x7fffffffUL;
    }

    if (x2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[x2]);
        } else {
            tmp = load_reg(x2);
        }
        if (b2) {
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        }
    } else if (b2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        } else {
            tmp = load_reg(b2);
        }
    } else {
        tmp = tcg_const_i64(d2);
    }

    /* 31-bit mode mask if there are values loaded from registers */
    if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
        tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
    }

    return tmp;
}
/* cc is known at translation time: track it as the constant @val. */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}

/* Record a one-operand lazy cc computation (operand in cc_dst). */
static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Record a two-operand lazy cc computation (cc_src, cc_dst). */
static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Record a three-operand lazy cc computation (cc_src, cc_dst, cc_vr). */
static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst, TCGv_i64 vr)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_mov_i64(cc_vr, vr);
    s->cc_op = op;
}

static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst, TCGv_i32 vr)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_extu_i32_i64(cc_vr, vr);
    s->cc_op = op;
}
/* cc = (val != 0), 32-bit operand. */
static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}

/* cc = (val != 0), 64-bit operand. */
static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}

/* Two-operand compare; @cond selects signed/unsigned semantics. */
static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i32(s, cond, v1, v2);
}

static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i64(s, cond, v1, v2);
}

/* Signed 32-bit compare. */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}

/* Unsigned 32-bit compare. */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}

/* Signed 32-bit compare against an immediate. */
static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
{
    /* XXX optimize for the constant? put it in s? */
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTGT_32);
    tcg_temp_free_i32(tmp);
}

/* Unsigned 32-bit compare against an immediate. */
static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
{
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
    tcg_temp_free_i32(tmp);
}

/* Signed 64-bit compare. */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}

/* Unsigned 64-bit compare. */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}

/* Signed 64-bit compare against an immediate. */
static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_s64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* Unsigned 64-bit compare against an immediate. */
static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_u64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* cc from the sign of a 32-bit value (compare against zero). */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}

/* cc from the sign of a 64-bit value (compare against zero). */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}

/* Arithmetic cc helpers: record both operands and the result lazily. */
static void set_cc_add64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_64, v1, v2, vr);
}

static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
}

static void set_cc_sub64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_64, v1, v2, vr);
}

static void set_cc_subu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_64, v1, v2, vr);
}

static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}

static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}

static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
}

static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
}

static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
}

static void set_cc_subu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUBU_32, v1, v2, vr);
}

static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}

static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}

static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}

static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}

/* cc for INSERT CHARACTERS UNDER MASK. */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}

/* Short-float compare: the i32 operand is stored zero-extended. */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}

/* cc = (short float != 0). */
static void gen_set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}
/* CC value is in env->cc_op; the lazy operands become meaningless. */
static inline void set_cc_static(DisasContext *s)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}

/* Flush a compile-time-known cc_op into the cc_op TCG global so the
 * runtime state matches the translator's lazy state.
 */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}

static inline void gen_update_cc_op(DisasContext *s)
{
    gen_op_set_cc_op(s);
}
696 /* calculates cc into cc_op */
697 static void gen_op_calc_cc(DisasContext *s)
699 TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
700 TCGv_i64 dummy = tcg_const_i64(0);
702 switch (s->cc_op) {
703 case CC_OP_CONST0:
704 case CC_OP_CONST1:
705 case CC_OP_CONST2:
706 case CC_OP_CONST3:
707 /* s->cc_op is the cc value */
708 tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
709 break;
710 case CC_OP_STATIC:
711 /* env->cc_op already is the cc value */
712 break;
713 case CC_OP_NZ:
714 case CC_OP_ABS_64:
715 case CC_OP_NABS_64:
716 case CC_OP_ABS_32:
717 case CC_OP_NABS_32:
718 case CC_OP_LTGT0_32:
719 case CC_OP_LTGT0_64:
720 case CC_OP_COMP_32:
721 case CC_OP_COMP_64:
722 case CC_OP_NZ_F32:
723 case CC_OP_NZ_F64:
724 /* 1 argument */
725 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, dummy, cc_dst, dummy);
726 break;
727 case CC_OP_ICM:
728 case CC_OP_LTGT_32:
729 case CC_OP_LTGT_64:
730 case CC_OP_LTUGTU_32:
731 case CC_OP_LTUGTU_64:
732 case CC_OP_TM_32:
733 case CC_OP_TM_64:
734 case CC_OP_LTGT_F32:
735 case CC_OP_LTGT_F64:
736 case CC_OP_SLAG:
737 /* 2 arguments */
738 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, dummy);
739 break;
740 case CC_OP_ADD_64:
741 case CC_OP_ADDU_64:
742 case CC_OP_SUB_64:
743 case CC_OP_SUBU_64:
744 case CC_OP_ADD_32:
745 case CC_OP_ADDU_32:
746 case CC_OP_SUB_32:
747 case CC_OP_SUBU_32:
748 /* 3 arguments */
749 gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, cc_vr);
750 break;
751 case CC_OP_DYNAMIC:
752 /* unknown operation - assume 3 arguments and cc_op in env */
753 gen_helper_calc_cc(cc_op, cpu_env, cc_op, cc_src, cc_dst, cc_vr);
754 break;
755 default:
756 tcg_abort();
759 tcg_temp_free_i32(local_cc_op);
761 /* We now have cc in cc_op as constant */
762 set_cc_static(s);
/* RR format: r1 in bits 7..4, r2 in bits 3..0 of the 2-byte insn. */
static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
{
    debug_insn(insn);

    *r1 = (insn >> 4) & 0xf;
    *r2 = insn & 0xf;
}

/* RX format: r1, index x2, base b2 and 12-bit displacement d2.
 * Returns the computed effective address (caller frees the temp).
 */
static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
                                 int *x2, int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    *x2 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;

    return get_address(s, *x2, *b2, *d2);
}

/* RS format: r1, r3 (or mask m3), base b2 and 12-bit displacement d2. */
static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
                             int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    /* aka m3 */
    *r3 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;
}

/* SI format: 8-bit immediate i2, base b1 and 12-bit displacement d1.
 * Returns the computed effective address (caller frees the temp).
 */
static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
                                 int *b1, int *d1)
{
    debug_insn(insn);

    *i2 = (insn >> 16) & 0xff;
    *b1 = (insn >> 12) & 0xf;
    *d1 = insn & 0xfff;

    return get_address(s, 0, *b1, *d1);
}
/* End the TB with a jump to guest address @pc, chaining to slot
 * @tb_num of the current TB when the target is on the same page.
 */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
{
    TranslationBlock *tb;

    gen_update_cc_op(s);

    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb(0);
    }
}
/* Statistics (DEBUG_INLINE_BRANCHES only): a branch could not be
 * inlined and fell back to the generic cc computation.  Note the
 * parameter intentionally shadows the cc_op TCG global here.
 */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}

/* Statistics counterpart: the branch was folded to an inline compare. */
static inline void account_inline_branch(DisasContext *s)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[s->cc_op]++;
#endif
}
/* Emit a branch to label @skip that is taken when the condition encoded
 * in @mask is NOT satisfied; the code emitted after this call is thus
 * the "condition met" path.  @mask is the architectural 4-bit condition
 * mask: bit 0x8 selects cc 0, 0x4 cc 1, 0x2 cc 2, 0x1 cc 3.
 * Well-known (cc_op, mask) pairs are inlined as direct compares on the
 * lazy cc operands; everything else materializes cc via gen_op_calc_cc()
 * and tests the resulting value.
 */
static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
{
    TCGv_i32 tmp, tmp2, r;
    TCGv_i64 tmp64;
    int old_cc_op;

    switch (s->cc_op) {
    case CC_OP_LTGT0_32:
        tmp = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* dst <= 0 */
            tcg_gen_brcondi_i32(TCG_COND_GT, tmp, 0, skip);
            break;
        case 0x8 | 0x2: /* dst >= 0 */
            tcg_gen_brcondi_i32(TCG_COND_LT, tmp, 0, skip);
            break;
        case 0x8: /* dst == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            break;
        case 0x7: /* dst != 0 */
        case 0x6: /* dst != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            break;
        case 0x4: /* dst < 0 */
            tcg_gen_brcondi_i32(TCG_COND_GE, tmp, 0, skip);
            break;
        case 0x2: /* dst > 0 */
            tcg_gen_brcondi_i32(TCG_COND_LE, tmp, 0, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        break;
    case CC_OP_LTGT0_64:
        switch (mask) {
        case 0x8 | 0x4: /* dst <= 0 */
            tcg_gen_brcondi_i64(TCG_COND_GT, cc_dst, 0, skip);
            break;
        case 0x8 | 0x2: /* dst >= 0 */
            tcg_gen_brcondi_i64(TCG_COND_LT, cc_dst, 0, skip);
            break;
        case 0x8: /* dst == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        case 0x7: /* dst != 0 */
        case 0x6: /* dst != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        case 0x4: /* dst < 0 */
            tcg_gen_brcondi_i64(TCG_COND_GE, cc_dst, 0, skip);
            break;
        case 0x2: /* dst > 0 */
            tcg_gen_brcondi_i64(TCG_COND_LE, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_LTGT_32:
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i32(TCG_COND_GT, tmp, tmp2, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i32(TCG_COND_LT, tmp, tmp2, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i32(TCG_COND_GE, tmp, tmp2, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i32(TCG_COND_LE, tmp, tmp2, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        break;
    case CC_OP_LTGT_64:
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i64(TCG_COND_GT, cc_src, cc_dst, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i64(TCG_COND_LT, cc_src, cc_dst, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i64(TCG_COND_GE, cc_src, cc_dst, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i64(TCG_COND_LE, cc_src, cc_dst, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_LTUGTU_32:
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i32(TCG_COND_GTU, tmp, tmp2, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i32(TCG_COND_LTU, tmp, tmp2, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i32(TCG_COND_GEU, tmp, tmp2, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i32(TCG_COND_LEU, tmp, tmp2, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        break;
    case CC_OP_LTUGTU_64:
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i64(TCG_COND_GTU, cc_src, cc_dst, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i64(TCG_COND_LTU, cc_src, cc_dst, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i64(TCG_COND_GEU, cc_src, cc_dst, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i64(TCG_COND_LEU, cc_src, cc_dst, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_NZ:
        switch (mask) {
        /* dst == 0 || dst != 0 */
        case 0x8 | 0x4:
        case 0x8 | 0x4 | 0x2:
        case 0x8 | 0x4 | 0x2 | 0x1:
        case 0x8 | 0x4 | 0x1:
            break;
        /* dst == 0 */
        case 0x8:
        case 0x8 | 0x2:
        case 0x8 | 0x2 | 0x1:
        case 0x8 | 0x1:
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        /* dst != 0 */
        case 0x4:
        case 0x4 | 0x2:
        case 0x4 | 0x2 | 0x1:
        case 0x4 | 0x1:
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_TM_32:
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();

        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        tcg_gen_and_i32(tmp, tmp, tmp2);
        switch (mask) {
        case 0x8: /* val & mask == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        account_inline_branch(s);
        break;
    case CC_OP_TM_64:
        tmp64 = tcg_temp_new_i64();

        tcg_gen_and_i64(tmp64, cc_src, cc_dst);
        switch (mask) {
        case 0x8: /* val & mask == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, tmp64, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, tmp64, 0, skip);
            break;
        default:
            tcg_temp_free_i64(tmp64);
            goto do_dynamic;
        }
        tcg_temp_free_i64(tmp64);
        account_inline_branch(s);
        break;
    case CC_OP_ICM:
        switch (mask) {
        case 0x8: /* val == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val != 0 */
        case 0x4 | 0x2: /* val != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_STATIC:
        /* cc is already materialized in cc_op; skip the calc step */
        old_cc_op = s->cc_op;
        goto do_dynamic_nocccalc;
    case CC_OP_DYNAMIC:
    default:
    do_dynamic:
        old_cc_op = s->cc_op;
        /* calculate cc value */
        gen_op_calc_cc(s);

    do_dynamic_nocccalc:
        /* jump based on cc */
        account_noninline_branch(s, old_cc_op);

        switch (mask) {
        case 0x8 | 0x4 | 0x2 | 0x1:
            /* always true */
            break;
        case 0x8 | 0x4 | 0x2: /* cc != 3 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 3, skip);
            break;
        case 0x8 | 0x4 | 0x1: /* cc != 2 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 2, skip);
            break;
        case 0x8 | 0x2 | 0x1: /* cc != 1 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 1, skip);
            break;
        case 0x8 | 0x2: /* cc == 0 || cc == 2 */
            tmp = tcg_temp_new_i32();
            tcg_gen_andi_i32(tmp, cc_op, 1);
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            tcg_temp_free_i32(tmp);
            break;
        case 0x8 | 0x4: /* cc < 2 */
            tcg_gen_brcondi_i32(TCG_COND_GEU, cc_op, 2, skip);
            break;
        case 0x8: /* cc == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* cc != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 0, skip);
            break;
        case 0x4 | 0x1: /* cc == 1 || cc == 3 */
            tmp = tcg_temp_new_i32();
            tcg_gen_andi_i32(tmp, cc_op, 1);
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            tcg_temp_free_i32(tmp);
            break;
        case 0x4: /* cc == 1 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 1, skip);
            break;
        case 0x2 | 0x1: /* cc > 1 */
            tcg_gen_brcondi_i32(TCG_COND_LEU, cc_op, 1, skip);
            break;
        case 0x2: /* cc == 2 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 2, skip);
            break;
        case 0x1: /* cc == 3 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 3, skip);
            break;
        default: /* cc is masked by something else */
            tmp = tcg_const_i32(3);
            /* 3 - cc */
            tcg_gen_sub_i32(tmp, tmp, cc_op);
            tmp2 = tcg_const_i32(1);
            /* 1 << (3 - cc) */
            tcg_gen_shl_i32(tmp2, tmp2, tmp);
            r = tcg_const_i32(mask);
            /* mask & (1 << (3 - cc)) */
            tcg_gen_and_i32(r, r, tmp2);
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);

            tcg_gen_brcondi_i32(TCG_COND_EQ, r, 0, skip);
            tcg_temp_free_i32(r);
            break;
        }
        break;
    }
}
1191 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1192 uint64_t offset)
1194 int skip;
1196 if (mask == 0xf) {
1197 /* unconditional */
1198 tcg_gen_mov_i64(psw_addr, target);
1199 tcg_gen_exit_tb(0);
1200 } else if (mask == 0) {
1201 /* ignore cc and never match */
1202 gen_goto_tb(s, 0, offset + 2);
1203 } else {
1204 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1206 tcg_gen_mov_i64(new_addr, target);
1207 skip = gen_new_label();
1208 gen_jcc(s, mask, skip);
1209 tcg_gen_mov_i64(psw_addr, new_addr);
1210 tcg_temp_free_i64(new_addr);
1211 tcg_gen_exit_tb(0);
1212 gen_set_label(skip);
1213 tcg_temp_free_i64(new_addr);
1214 gen_goto_tb(s, 1, offset + 2);
/* BRANCH RELATIVE ON CONDITION: branch to pc + @offset when the
 * condition encoded by @mask is met, otherwise to the next insn
 * (pc + 4).  Always ends the TB.
 */
static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
{
    int skip;

    if (mask == 0xf) {
        /* unconditional */
        gen_goto_tb(s, 0, s->pc + offset);
    } else if (mask == 0) {
        /* ignore cc and never match */
        gen_goto_tb(s, 0, s->pc + 4);
    } else {
        skip = gen_new_label();
        gen_jcc(s, mask, skip);
        gen_goto_tb(s, 0, s->pc + offset);
        gen_set_label(skip);
        gen_goto_tb(s, 1, s->pc + 4);
    }
    s->is_jmp = DISAS_TB_JUMP;
}
/*
 * MOVE (MVC): copy l + 1 bytes from the address in s2 to the address
 * in s1.  l is the instruction's length field, i.e. one less than the
 * number of bytes moved.
 *
 * Short, "even" lengths are emitted inline as loads/stores; all other
 * lengths fall back to the mvc helper.  The overlap case
 * dest == src + 1 is handled separately: per the architected MVC
 * semantics it propagates the byte at src, i.e. acts as a memset.
 */
static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
{
    TCGv_i64 tmp, tmp2;
    int i;
    int l_memset = gen_new_label();
    int l_out = gen_new_label();
    /* local temps: they must survive the brcond to l_memset below */
    TCGv_i64 dest = tcg_temp_local_new_i64();
    TCGv_i64 src = tcg_temp_local_new_i64();
    TCGv_i32 vl;

    /* Find out if we should use the inline version of mvc */
    switch (l) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 11:
    case 15:
        /* use inline */
        break;
    default:
        /* Fall back to helper */
        vl = tcg_const_i32(l);
        potential_page_fault(s);
        gen_helper_mvc(cpu_env, vl, s1, s2);
        tcg_temp_free_i32(vl);
        return;
    }

    tcg_gen_mov_i64(dest, s1);
    tcg_gen_mov_i64(src, s2);

    if (!(s->tb->flags & FLAG_MASK_64)) {
        /* 31-bit addressing mode: mask addresses down to 31 bits.  */
        /* XXX what if we overflow while moving? */
        tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
        tcg_gen_andi_i64(src, src, 0x7fffffffUL);
    }

    /* Detect the memset special case dest == src + 1 at runtime.  */
    tmp = tcg_temp_new_i64();
    tcg_gen_addi_i64(tmp, src, 1);
    tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
    tcg_temp_free_i64(tmp);

    switch (l) {
    case 0:
        /* 1 byte */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 1:
        /* 2 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 3:
        /* 4 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 4:
        /* 5 bytes: one 4-byte plus one 1-byte access */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
        tcg_gen_addi_i64(src, src, 4);
        tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
        tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 4);
        tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 7:
        /* 8 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    default:
        /* The inline version can become too big for too uneven numbers, only
           use it on known good lengths */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_const_i64(8);
        /* 8-byte chunks first ... */
        for (i = 0; (i + 7) <= l; i += 8) {
            tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
            tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));

            tcg_gen_add_i64(src, src, tmp2);
            tcg_gen_add_i64(dest, dest, tmp2);
        }

        tcg_temp_free_i64(tmp2);
        tmp2 = tcg_const_i64(1);

        /* ... then the remaining tail byte by byte.  */
        for (; i <= l; i++) {
            tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
            tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));

            tcg_gen_add_i64(src, src, tmp2);
            tcg_gen_add_i64(dest, dest, tmp2);
        }

        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    }

    tcg_gen_br(l_out);

    gen_set_label(l_memset);
    /* memset case (dest == (src + 1)) */

    tmp = tcg_temp_new_i64();
    tmp2 = tcg_temp_new_i64();
    /* fill tmp with the byte */
    tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
    tcg_gen_shli_i64(tmp2, tmp, 8);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_gen_shli_i64(tmp2, tmp, 16);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_gen_shli_i64(tmp2, tmp, 32);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_temp_free_i64(tmp2);

    /* NOTE(review): tmp2 below is allocated as a constant 8/1 but the
       store loops advance dest with tcg_gen_addi_i64 instead, so the
       constants appear unused — harmless, but worth confirming.  */
    tmp2 = tcg_const_i64(8);

    for (i = 0; (i + 7) <= l; i += 8) {
        tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 8);
    }

    tcg_temp_free_i64(tmp2);
    tmp2 = tcg_const_i64(1);

    for (; i <= l; i++) {
        tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 1);
    }

    tcg_temp_free_i64(tmp2);
    tcg_temp_free_i64(tmp);

    gen_set_label(l_out);

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
/*
 * COMPARE LOGICAL (CLC): compare l + 1 bytes at address s1 with the
 * bytes at address s2 and set the cc accordingly (unsigned compare).
 *
 * Lengths that map to a single 1/2/4/8-byte access are emitted
 * inline as a single unsigned compare; everything else is handled by
 * the clc helper, which also computes the cc.
 */
static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
{
    TCGv_i64 tmp;
    TCGv_i64 tmp2;
    TCGv_i32 vl;

    /* check for simple 32bit or 64bit match */
    switch (l) {
    case 0:
        /* 1 byte */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    case 1:
        /* 2 bytes */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    case 3:
        /* 4 bytes */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    case 7:
        /* 8 bytes */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    }

    /* General case: helper does the byte loop and sets cc itself.  */
    potential_page_fault(s);
    vl = tcg_const_i32(l);
    gen_helper_clc(cc_op, cpu_env, vl, s1, s2);
    tcg_temp_free_i32(vl);
    set_cc_static(s);
}
/*
 * Translate one instruction of the 0xe3 opcode family (RXY format:
 * loads, stores and arithmetic with a 20-bit signed displacement).
 * op is the low opcode byte, r1 the register operand, and the
 * effective address is d2(x2, b2).
 */
static void disas_e3(CPUS390XState *env, DisasContext* s, int op, int r1,
                     int x2, int b2, int d2)
{
    TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
              op, r1, x2, b2, d2);
    addr = get_address(s, x2, b2, d2);
    switch (op) {
    case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
    case 0x4: /* lg r1,d2(x2,b2) */
        tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
        if (op == 0x2) {
            /* LTG additionally sets the cc from the loaded value */
            set_cc_s64(s, regs[r1]);
        }
        break;
    case 0x12: /* LT R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        store_reg32(r1, tmp32_1);
        set_cc_s32(s, tmp32_1);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
    case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0xc) {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        } else {
            /* MSGF multiplies by a sign-extended 32-bit operand */
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        }
        tcg_gen_mul_i64(regs[r1], regs[r1], tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
    case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x1d) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        /* dividend is in r1 + 1; quotient -> r1 + 1, remainder -> r1 */
        tmp4 = load_reg(r1 + 1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_div_i64(tmp3, tmp4, tmp2);
        store_reg(r1 + 1, tmp3);
        tcg_gen_rem_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x8: /* AG R1,D2(X2,B2) [RXY] */
    case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
    case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
    case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
        if (op == 0x1a) {
            /* ALGF: zero-extended 32-bit operand */
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else if (op == 0x18) {
            /* AGF: sign-extended 32-bit operand */
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_add_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x8:
        case 0x18:
            /* signed add: cc reflects overflow */
            set_cc_add64(s, tmp4, tmp2, tmp3);
            break;
        case 0xa:
        case 0x1a:
            /* logical add: cc reflects carry */
            set_cc_addu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x9: /* SG R1,D2(X2,B2) [RXY] */
    case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
    case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
    case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x19) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else if (op == 0x1b) {
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_sub_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x9:
        case 0x19:
            set_cc_sub64(s, tmp4, tmp2, tmp3);
            break;
        case 0xb:
        case 0x1b:
            set_cc_subu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
        /* load 64-bit value byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        tcg_gen_bswap64_i64(tmp2, tmp2);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
    case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        if (op == 0x14) {
            /* LGF sign-extends, LLGF keeps the zero-extension */
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
        /* load 31-bit value: bit 32 (the "31-bit address" top bit)
           is masked off */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
        /* load 32-bit value byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
        /* load 16-bit value byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
        store_reg16(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x20: /* CG R1,D2(X2,B2) [RXY] */
    case 0x21: /* CLG R1,D2(X2,B2) */
    case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
    case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        /* first pick the width/extension of the memory operand ... */
        switch (op) {
        case 0x20:
        case 0x21:
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
            break;
        case 0x30:
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
            break;
        case 0x31:
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
            break;
        default:
            tcg_abort();
        }
        /* ... then do a signed or unsigned compare */
        switch (op) {
        case 0x20:
        case 0x30:
            cmp_s64(s, regs[r1], tmp2);
            break;
        case 0x21:
        case 0x31:
            cmp_u64(s, regs[r1], tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x24: /* stg r1, d2(x2,b2) */
        tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
        break;
    case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
        /* store 32-bit value byte-reversed */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x50: /* STY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x57: /* XY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
        store_reg32(r1, tmp32_2);
        set_cc_nz_u32(s, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x58: /* LY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
    case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        switch (op) {
        case 0x5a:
            tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        case 0x5b:
            tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_3);
        switch (op) {
        case 0x5a:
            set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x5b:
            set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
        /* load address: the effective address itself is the result */
        store_reg(r1, addr);
        break;
    case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
        store_reg8(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x76: /* LB R1,D2(X2,B2) [RXY] */
    case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
        switch (op) {
        case 0x76:
            /* LB: 32-bit destination */
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg32_i64(r1, tmp2);
            break;
        case 0x77:
            /* LGB: 64-bit destination */
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg(r1, tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x80: /* NG R1,D2(X2,B2) [RXY] */
    case 0x81: /* OG R1,D2(X2,B2) [RXY] */
    case 0x82: /* XG R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
        switch (op) {
        case 0x80:
            tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x81:
            tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x82:
            tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
            break;
        default:
            tcg_abort();
        }
        set_cc_nz_u64(s, regs[r1]);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
        /* 128-bit product: done in the helper */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_mlg(cpu_env, tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
        /* 128-by-64-bit divide: done in the helper */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_dlg(cpu_env, tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
        /* add logical with carry: carry is bit 1 of the current cc */
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        tcg_gen_extu_i32_i64(tmp3, cc_op);
        tcg_gen_shri_i64(tmp3, tmp3, 1);
        tcg_gen_andi_i64(tmp3, tmp3, 1);
        tcg_gen_add_i64(tmp3, tmp2, tmp3);
        tcg_gen_add_i64(tmp3, regs[r1], tmp3);
        store_reg(r1, tmp3);
        /* NOTE(review): regs[r1] is read here after store_reg has
           already written the result into it, so the cc computation
           sees the sum rather than the original first operand — this
           looks wrong; confirm against ALC (0x98) below, which loads
           the operand before storing.  */
        set_cc_addu64(s, regs[r1], tmp2, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, regs[r1], tmp2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x96: /* ML R1,D2(X2,B2) [RXY] */
        /* 32x32 -> 64 multiply: low half to r1+1, high half to r1 */
        tmp2 = tcg_temp_new_i64();
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_mul_i64(tmp2, tmp2, tmp3);
        store_reg32_i64((r1 + 1) & 15, tmp2);
        tcg_gen_shri_i64(tmp2, tmp2, 32);
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x97: /* DL R1,D2(X2,B2) [RXY] */
        /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
        /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
        tmp = load_reg(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        /* build the 64-bit dividend from the even/odd register pair */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(tmp, tmp, tmp3);

        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        tcg_gen_div_i64(tmp, tmp, tmp2);
        store_reg32_i64((r1 + 1) & 15, tmp);
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
        set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal e3 operation 0x%x\n", op);
        gen_illegal_opcode(env, s, 3);
        break;
    }
    tcg_temp_free_i64(addr);
}
1935 #ifndef CONFIG_USER_ONLY
/*
 * Translate one instruction of the 0xe5 opcode family (SSE format:
 * two storage operands, each addressed as d(b)).  Only TPROT is
 * implemented; everything else raises an illegal-opcode exception.
 */
static void disas_e5(CPUS390XState *env, DisasContext* s, uint64_t insn)
{
    TCGv_i64 tmp, tmp2;
    int op = (insn >> 32) & 0xff;

    /* first operand address d1(b1), second operand address d2(b2) */
    tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
    tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);

    LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
    switch (op) {
    case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
        /* Test Protection */
        potential_page_fault(s);
        gen_helper_tprot(cc_op, tmp, tmp2);
        set_cc_static(s);
        break;
    default:
        LOG_DISAS("illegal e5 operation 0x%x\n", op);
        gen_illegal_opcode(env, s, 3);
        break;
    }

    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(tmp2);
}
1961 #endif
/*
 * Translate one instruction of the 0xeb opcode family (RSY/SIY
 * formats: shifts/rotates, multi-register load/store, control
 * register access, compare-and-swap, and SIY immediates).
 * For SIY-format ops the immediate is reassembled from (r1 << 4) | r3.
 */
static void disas_eb(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int r3, int b2, int d2)
{
    TCGv_i64 tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2;
    int i, stm_len;
    int ilc = 3;

    LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
              op, r1, r3, b2, d2);
    switch (op) {
    case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
    case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
    case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
    case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
    case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
        /* 64-bit shift/rotate amount is the low 6 bits of the address */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        switch (op) {
        case 0xc:
            tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xd:
            tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xa:
            tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xb:
            tmp2 = tcg_temp_new_i64();
            tmp3 = tcg_temp_new_i64();
            gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
            tcg_gen_shl_i64(tmp2, regs[r3], tmp);
            /* override sign bit with source sign */
            tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
            tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
            tcg_gen_or_i64(regs[r1], tmp2, tmp3);
            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp3);
            break;
        case 0x1c:
            tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
            break;
        default:
            tcg_abort();
            break;
        }
        if (op == 0xa) {
            /* SRAG sets the cc from the shifted result */
            set_cc_s64(s, regs[r1]);
        }
        tcg_temp_free_i64(tmp);
        break;
    case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        tmp32_1 = tcg_temp_new_i32();
        tmp32_2 = load_reg32(r3);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        switch (op) {
        case 0x1d:
            tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
            break;
        default:
            tcg_abort();
            break;
        }
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
    case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
        stm_len = 8;
        goto do_mh;
    case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
    case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
        stm_len = 4;
    do_mh:
        /* Apparently, unrolling lmg/stmg of any size gains performance -
           even for very long ones... */
        tmp = get_address(s, 0, b2, d2);
        tmp3 = tcg_const_i64(stm_len);
        /* NOTE(review): tmp4 is the shift used to reach the register
           high half; for LMH (0x96) this evaluates to 4, while the
           store path for STMH uses 32 — shifting the loaded word left
           by 4 looks wrong, confirm whether this should be 32.  */
        tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
        /* registers wrap from 15 back to 0; stop after storing r3 */
        for (i = r1;; i = (i + 1) % 16) {
            switch (op) {
            case 0x4:
                tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x96:
                tmp2 = tcg_temp_new_i64();
#if HOST_LONG_BITS == 32
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
#else
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_shl_i64(tmp2, tmp2, tmp4);
                tcg_gen_ext32u_i64(regs[i], regs[i]);
                tcg_gen_or_i64(regs[i], regs[i], tmp2);
#endif
                tcg_temp_free_i64(tmp2);
                break;
            case 0x24:
                tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x26:
                tmp2 = tcg_temp_new_i64();
                tcg_gen_shr_i64(tmp2, regs[i], tmp4);
                tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
                tcg_temp_free_i64(tmp2);
                break;
            default:
                tcg_abort();
            }
            if (i == r3) {
                break;
            }
            tcg_gen_add_i64(tmp, tmp, tmp3);
        }
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stcmh(cpu_env, tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#ifndef CONFIG_USER_ONLY
    case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
        /* Load Control */
        check_privileged(env, s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_lctlg(cpu_env, tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
        /* Store Control */
        check_privileged(env, s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stctg(cpu_env, tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#endif
    case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_csg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_cdsg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x51: /* TMY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
        /* yes, this is a 32 bit operation with 64 bit tcg registers, because
           that incurs less conversions */
        cmp_64(s, tmp, tmp2, CC_OP_TM_32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x52: /* MVIY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x55: /* CLIY D1(B1),I2 [SIY] */
        tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
        tmp = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX split CC calculation out */
        gen_helper_icmh(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal eb operation 0x%x\n", op);
        gen_illegal_opcode(env, s, ilc);
        break;
    }
}
/*
 * Translate one instruction of the 0xed opcode family (RXE/RXF
 * formats: binary floating point operations with a storage operand).
 * For RXF instructions r1 holds the instruction's R3 field and r1b
 * its R1 field (see the MADB case).
 */
static void disas_ed(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int x2, int b2, int d2, int r1b)
{
    TCGv_i32 tmp_r1, tmp32;
    TCGv_i64 addr, tmp;
    addr = get_address(s, x2, b2, d2);
    tmp_r1 = tcg_const_i32(r1);
    switch (op) {
    case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_ldeb(cpu_env, tmp_r1, addr);
        break;
    case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_lxdb(cpu_env, tmp_r1, addr);
        break;
    case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
        tmp = tcg_temp_new_i64();
        tmp32 = load_freg32(r1);
        tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
        set_cc_cmp_f32_i64(s, tmp32, tmp);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32);
        break;
    case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
        tmp = tcg_temp_new_i64();
        tmp32 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        gen_helper_aeb(cpu_env, tmp_r1, tmp32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32);

        /* reload the result to derive the cc from it */
        tmp32 = load_freg32(r1);
        gen_set_cc_nz_f32(s, tmp32);
        tcg_temp_free_i32(tmp32);
        break;
    case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
        tmp = tcg_temp_new_i64();
        tmp32 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        gen_helper_seb(cpu_env, tmp_r1, tmp32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32);

        /* reload the result to derive the cc from it */
        tmp32 = load_freg32(r1);
        gen_set_cc_nz_f32(s, tmp32);
        tcg_temp_free_i32(tmp32);
        break;
    case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
        tmp = tcg_temp_new_i64();
        tmp32 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        gen_helper_deb(cpu_env, tmp_r1, tmp32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32);
        break;
    case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_tceb(cc_op, cpu_env, tmp_r1, addr);
        set_cc_static(s);
        break;
    case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_tcdb(cc_op, cpu_env, tmp_r1, addr);
        set_cc_static(s);
        break;
    case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_tcxb(cc_op, cpu_env, tmp_r1, addr);
        set_cc_static(s);
        break;
    case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
        tmp = tcg_temp_new_i64();
        tmp32 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        gen_helper_meeb(cpu_env, tmp_r1, tmp32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32);
        break;
    case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_cdb(cc_op, cpu_env, tmp_r1, addr);
        set_cc_static(s);
        break;
    case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_adb(cc_op, cpu_env, tmp_r1, addr);
        set_cc_static(s);
        break;
    case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_sdb(cc_op, cpu_env, tmp_r1, addr);
        set_cc_static(s);
        break;
    case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_mdb(cpu_env, tmp_r1, addr);
        break;
    case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
        potential_page_fault(s);
        gen_helper_ddb(cpu_env, tmp_r1, addr);
        break;
    case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
        /* for RXF insns, r1 is R3 and r1b is R1 */
        tmp32 = tcg_const_i32(r1b);
        potential_page_fault(s);
        gen_helper_madb(cpu_env, tmp32, addr, tmp_r1);
        tcg_temp_free_i32(tmp32);
        break;
    default:
        LOG_DISAS("illegal ed operation 0x%x\n", op);
        gen_illegal_opcode(env, s, 3);
        /* NOTE(review): this path returns without freeing tmp_r1 and
           addr, unlike every other case — presumably harmless because
           the illegal-opcode exception ends the TB, but confirm.  */
        return;
    }
    tcg_temp_free_i32(tmp_r1);
    tcg_temp_free_i64(addr);
}
/*
 * disas_a5: decode the 0xa5 (RI-format) opcode group — insert, AND,
 * OR and load-logical of a 16-bit immediate into one of the four
 * halfwords of general register r1.  op is the sub-opcode, i2 the
 * immediate already extracted by the caller.  The NIxx/OIxx forms
 * also set the condition code from the resulting halfword.
 */
2323 static void disas_a5(CPUS390XState *env, DisasContext *s, int op, int r1,
2324 int i2)
2326 TCGv_i64 tmp, tmp2;
2327 TCGv_i32 tmp32;
2328 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2329 switch (op) {
2330 case 0x0: /* IIHH R1,I2 [RI] */
2331 tmp = tcg_const_i64(i2);
/* insert the immediate into bits 48..63 of r1, other bits untouched */
2332 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
2333 tcg_temp_free_i64(tmp);
2334 break;
2335 case 0x1: /* IIHL R1,I2 [RI] */
2336 tmp = tcg_const_i64(i2);
2337 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
2338 tcg_temp_free_i64(tmp);
2339 break;
2340 case 0x2: /* IILH R1,I2 [RI] */
2341 tmp = tcg_const_i64(i2);
2342 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
2343 tcg_temp_free_i64(tmp);
2344 break;
2345 case 0x3: /* IILL R1,I2 [RI] */
2346 tmp = tcg_const_i64(i2);
2347 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
2348 tcg_temp_free_i64(tmp);
2349 break;
/* AND/OR the immediate into the high-high halfword (bits 48..63) */
2350 case 0x4: /* NIHH R1,I2 [RI] */
2351 case 0x8: /* OIHH R1,I2 [RI] */
2352 tmp = load_reg(r1);
2353 tmp32 = tcg_temp_new_i32();
2354 switch (op) {
2355 case 0x4:
/* mask keeps all bits outside the target halfword set for the AND */
2356 tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
2357 | 0x0000ffffffffffffULL);
2358 tcg_gen_and_i64(tmp, tmp, tmp2);
2359 break;
2360 case 0x8:
2361 tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
2362 tcg_gen_or_i64(tmp, tmp, tmp2);
2363 break;
2364 default:
2365 tcg_abort();
2367 store_reg(r1, tmp);
/* tmp2 (a constant temp) is reused as scratch to extract the
   modified halfword; the shift leaves only 16 bits, so no mask is
   needed before setting the CC */
2368 tcg_gen_shri_i64(tmp2, tmp, 48);
2369 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2370 set_cc_nz_u32(s, tmp32);
2371 tcg_temp_free_i64(tmp2);
2372 tcg_temp_free_i32(tmp32);
2373 tcg_temp_free_i64(tmp);
2374 break;
/* AND/OR into the high-low halfword (bits 32..47) */
2375 case 0x5: /* NIHL R1,I2 [RI] */
2376 case 0x9: /* OIHL R1,I2 [RI] */
2377 tmp = load_reg(r1);
2378 tmp32 = tcg_temp_new_i32();
2379 switch (op) {
2380 case 0x5:
2381 tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
2382 | 0xffff0000ffffffffULL);
2383 tcg_gen_and_i64(tmp, tmp, tmp2);
2384 break;
2385 case 0x9:
2386 tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
2387 tcg_gen_or_i64(tmp, tmp, tmp2);
2388 break;
2389 default:
2390 tcg_abort();
2392 store_reg(r1, tmp);
2393 tcg_gen_shri_i64(tmp2, tmp, 32);
2394 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2395 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2396 set_cc_nz_u32(s, tmp32);
2397 tcg_temp_free_i64(tmp2);
2398 tcg_temp_free_i32(tmp32);
2399 tcg_temp_free_i64(tmp);
2400 break;
/* AND/OR into the low-high halfword (bits 16..31); note this case
   shifts tmp itself rather than reusing tmp2 as scratch */
2401 case 0x6: /* NILH R1,I2 [RI] */
2402 case 0xa: /* OILH R1,I2 [RI] */
2403 tmp = load_reg(r1);
2404 tmp32 = tcg_temp_new_i32();
2405 switch (op) {
2406 case 0x6:
2407 tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
2408 | 0xffffffff0000ffffULL);
2409 tcg_gen_and_i64(tmp, tmp, tmp2);
2410 break;
2411 case 0xa:
2412 tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
2413 tcg_gen_or_i64(tmp, tmp, tmp2);
2414 break;
2415 default:
2416 tcg_abort();
2418 store_reg(r1, tmp);
2419 tcg_gen_shri_i64(tmp, tmp, 16);
2420 tcg_gen_trunc_i64_i32(tmp32, tmp)
2420 tcg_gen_trunc_i64_i32(tmp32, tmp);
2421 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2422 set_cc_nz_u32(s, tmp32);
2423 tcg_temp_free_i64(tmp2);
2424 tcg_temp_free_i32(tmp32);
2425 tcg_temp_free_i64(tmp);
2426 break;
/* AND/OR into the low-low halfword (bits 0..15) — no shift needed */
2427 case 0x7: /* NILL R1,I2 [RI] */
2428 case 0xb: /* OILL R1,I2 [RI] */
2429 tmp = load_reg(r1);
2430 tmp32 = tcg_temp_new_i32();
2431 switch (op) {
2432 case 0x7:
2433 tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
2434 tcg_gen_and_i64(tmp, tmp, tmp2);
2435 break;
2436 case 0xb:
2437 tmp2 = tcg_const_i64(i2);
2438 tcg_gen_or_i64(tmp, tmp, tmp2);
2439 break;
2440 default:
2441 tcg_abort();
2443 store_reg(r1, tmp);
2444 tcg_gen_trunc_i64_i32(tmp32, tmp);
2445 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2446 set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
2447 tcg_temp_free_i64(tmp2);
2448 tcg_temp_free_i32(tmp32);
2449 tcg_temp_free_i64(tmp);
2450 break;
/* LLIxx: load the immediate into the chosen halfword, zeroing the
   rest of the register */
2451 case 0xc: /* LLIHH R1,I2 [RI] */
2452 tmp = tcg_const_i64( ((uint64_t)i2) << 48 );
2453 store_reg(r1, tmp);
2454 tcg_temp_free_i64(tmp);
2455 break;
2456 case 0xd: /* LLIHL R1,I2 [RI] */
2457 tmp = tcg_const_i64( ((uint64_t)i2) << 32 );
2458 store_reg(r1, tmp);
2459 tcg_temp_free_i64(tmp);
2460 break;
2461 case 0xe: /* LLILH R1,I2 [RI] */
2462 tmp = tcg_const_i64( ((uint64_t)i2) << 16 );
2463 store_reg(r1, tmp);
2464 tcg_temp_free_i64(tmp);
2465 break;
2466 case 0xf: /* LLILL R1,I2 [RI] */
2467 tmp = tcg_const_i64(i2);
2468 store_reg(r1, tmp);
2469 tcg_temp_free_i64(tmp);
2470 break;
2471 default:
2472 LOG_DISAS("illegal a5 operation 0x%x\n", op);
2473 gen_illegal_opcode(env, s, 2);
2474 return;
/*
 * disas_a7: decode the 0xa7 (RI-format) opcode group — test under
 * mask, relative branches (BRC/BRAS/BRCT/BRCTG) and halfword-immediate
 * arithmetic/compare.  i2 is the signed 16-bit immediate; for the
 * branch forms it is a halfword offset (hence the "* 2" scaling).
 */
2478 static void disas_a7(CPUS390XState *env, DisasContext *s, int op, int r1,
2479 int i2)
2481 TCGv_i64 tmp, tmp2;
2482 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2483 int l1;
2485 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2486 switch (op) {
/* TMxx: shift the selected halfword down to bits 0..15, then compare
   it against the (zero-extended) immediate mask via CC_OP_TM_64 */
2487 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2488 case 0x1: /* TMLL or TML R1,I2 [RI] */
2489 case 0x2: /* TMHH R1,I2 [RI] */
2490 case 0x3: /* TMHL R1,I2 [RI] */
2491 tmp = load_reg(r1);
2492 tmp2 = tcg_const_i64((uint16_t)i2)
2492 tmp2 = tcg_const_i64((uint16_t)i2);
2493 switch (op) {
2494 case 0x0:
2495 tcg_gen_shri_i64(tmp, tmp, 16);
2496 break;
2497 case 0x1:
2498 break;
2499 case 0x2:
2500 tcg_gen_shri_i64(tmp, tmp, 48);
2501 break;
2502 case 0x3:
2503 tcg_gen_shri_i64(tmp, tmp, 32);
2504 break;
2506 tcg_gen_andi_i64(tmp, tmp, 0xffff);
2507 cmp_64(s, tmp, tmp2, CC_OP_TM_64);
2508 tcg_temp_free_i64(tmp);
2509 tcg_temp_free_i64(tmp2);
2510 break;
2511 case 0x4: /* brc m1, i2 */
/* gen_brc terminates the TB itself, hence the early return */
2512 gen_brc(r1, s, i2 * 2LL);
2513 return;
2514 case 0x5: /* BRAS R1,I2 [RI] */
/* store the return address (link info) in r1 before branching */
2515 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
2516 store_reg(r1, tmp);
2517 tcg_temp_free_i64(tmp);
2518 gen_goto_tb(s, 0, s->pc + i2 * 2LL);
2519 s->is_jmp = DISAS_TB_JUMP;
2520 break;
2521 case 0x6: /* BRCT R1,I2 [RI] */
/* decrement the 32-bit counter; branch unless it reached zero */
2522 tmp32_1 = load_reg32(r1);
2523 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
2524 store_reg32(r1, tmp32_1);
2525 gen_update_cc_op(s);
2526 l1 = gen_new_label();
2527 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
2528 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2529 gen_set_label(l1);
2530 gen_goto_tb(s, 1, s->pc + 4);
2531 s->is_jmp = DISAS_TB_JUMP;
2532 tcg_temp_free_i32(tmp32_1);
2533 break;
2534 case 0x7: /* BRCTG R1,I2 [RI] */
/* 64-bit variant of BRCT */
2535 tmp = load_reg(r1);
2536 tcg_gen_subi_i64(tmp, tmp, 1);
2537 store_reg(r1, tmp);
2538 gen_update_cc_op(s);
2539 l1 = gen_new_label();
2540 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
2541 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2542 gen_set_label(l1);
2543 gen_goto_tb(s, 1, s->pc + 4);
2544 s->is_jmp = DISAS_TB_JUMP;
2545 tcg_temp_free_i64(tmp);
2546 break;
2547 case 0x8: /* lhi r1, i2 */
2548 tmp32_1 = tcg_const_i32(i2);
2549 store_reg32(r1, tmp32_1);
2550 tcg_temp_free_i32(tmp32_1);
2551 break;
2552 case 0x9: /* lghi r1, i2 */
2553 tmp = tcg_const_i64(i2);
2554 store_reg(r1, tmp);
2555 tcg_temp_free_i64(tmp);
2556 break;
2557 case 0xa: /* AHI R1,I2 [RI] */
2558 tmp32_1 = load_reg32(r1);
2559 tmp32_2 = tcg_temp_new_i32();
2560 tmp32_3 = tcg_const_i32(i2);
/* negative immediates are applied via subi; the result is identical
   to the add, so the CC below is still computed as an addition of i2 */
2562 if (i2 < 0) {
2563 tcg_gen_subi_i32(tmp32_2, tmp32_1, -i2);
2564 } else {
2565 tcg_gen_add_i32(tmp32_2, tmp32_1, tmp32_3);
2568 store_reg32(r1, tmp32_2);
2569 set_cc_add32(s, tmp32_1, tmp32_3, tmp32_2);
2570 tcg_temp_free_i32(tmp32_1);
2571 tcg_temp_free_i32(tmp32_2);
2572 tcg_temp_free_i32(tmp32_3);
2573 break;
2574 case 0xb: /* aghi r1, i2 */
2575 tmp = load_reg(r1);
2576 tmp2 = tcg_const_i64(i2);
2578 if (i2 < 0) {
2579 tcg_gen_subi_i64(regs[r1], tmp, -i2);
2580 } else {
2581 tcg_gen_add_i64(regs[r1], tmp, tmp2);
2583 set_cc_add64(s, tmp, tmp2, regs[r1]);
2584 tcg_temp_free_i64(tmp);
2585 tcg_temp_free_i64(tmp2);
2586 break;
2587 case 0xc: /* MHI R1,I2 [RI] */
2588 tmp32_1 = load_reg32(r1);
2589 tcg_gen_muli_i32(tmp32_1, tmp32_1, i2);
2590 store_reg32(r1, tmp32_1);
2591 tcg_temp_free_i32(tmp32_1);
2592 break;
2593 case 0xd: /* MGHI R1,I2 [RI] */
2594 tmp = load_reg(r1);
2595 tcg_gen_muli_i64(tmp, tmp, i2);
2596 store_reg(r1, tmp);
2597 tcg_temp_free_i64(tmp);
2598 break;
2599 case 0xe: /* CHI R1,I2 [RI] */
2600 tmp32_1 = load_reg32(r1);
2601 cmp_s32c(s, tmp32_1, i2);
2602 tcg_temp_free_i32(tmp32_1);
2603 break;
2604 case 0xf: /* CGHI R1,I2 [RI] */
2605 tmp = load_reg(r1);
2606 cmp_s64c(s, tmp, i2);
2607 tcg_temp_free_i64(tmp);
2608 break;
2609 default:
2610 LOG_DISAS("illegal a7 operation 0x%x\n", op);
2611 gen_illegal_opcode(env, s, 2);
2612 return;
/*
 * disas_b2: decode the 0xb2 opcode group.  The unconditional part
 * handles RRE string/checksum/register ops (IPM, CKSM, MVST, CLST,
 * SRST, ...); the softmmu-only part (!CONFIG_USER_ONLY) implements
 * privileged system-control instructions.  ilc — the instruction
 * length in halfwords — is 2 for this whole group and is passed to
 * the privilege-check and illegal-opcode helpers.
 */
2616 static void disas_b2(CPUS390XState *env, DisasContext *s, int op,
2617 uint32_t insn)
2619 TCGv_i64 tmp, tmp2, tmp3;
2620 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2621 int r1, r2;
2622 int ilc = 2;
2623 #ifndef CONFIG_USER_ONLY
2624 int r3, d2, b2;
2625 #endif
/* RRE format: R1/R2 live in the low byte of the instruction */
2627 r1 = (insn >> 4) & 0xf;
2628 r2 = insn & 0xf;
2630 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);
2632 switch (op) {
2633 case 0x22: /* IPM R1 [RRE] */
/* the CC must be materialized before the helper can insert it */
2634 tmp32_1 = tcg_const_i32(r1);
2635 gen_op_calc_cc(s);
2636 gen_helper_ipm(cpu_env, cc_op, tmp32_1);
2637 tcg_temp_free_i32(tmp32_1);
2638 break;
2639 case 0x41: /* CKSM R1,R2 [RRE] */
2640 tmp32_1 = tcg_const_i32(r1);
2641 tmp32_2 = tcg_const_i32(r2);
2642 potential_page_fault(s);
2643 gen_helper_cksm(cpu_env, tmp32_1, tmp32_2);
2644 tcg_temp_free_i32(tmp32_1);
2645 tcg_temp_free_i32(tmp32_2);
2646 gen_op_movi_cc(s, 0);
2647 break;
2648 case 0x4e: /* SAR R1,R2 [RRE] */
/* copy general register r2 into access register r1 */
2649 tmp32_1 = load_reg32(r2);
2650 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r1]));
2651 tcg_temp_free_i32(tmp32_1);
2652 break;
2653 case 0x4f: /* EAR R1,R2 [RRE] */
/* copy access register r2 into general register r1 */
2654 tmp32_1 = tcg_temp_new_i32();
2655 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r2]));
2656 store_reg32(r1, tmp32_1);
2657 tcg_temp_free_i32(tmp32_1);
2658 break;
2659 case 0x52: /* MSR R1,R2 [RRE] */
2660 tmp32_1 = load_reg32(r1);
2661 tmp32_2 = load_reg32(r2);
2662 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
2663 store_reg32(r1, tmp32_1);
2664 tcg_temp_free_i32(tmp32_1);
2665 tcg_temp_free_i32(tmp32_2);
2666 break;
2667 case 0x54: /* MVPG R1,R2 [RRE] */
/* r0 carries the MVPG flags, hence the extra load_reg(0) */
2668 tmp = load_reg(0);
2669 tmp2 = load_reg(r1);
2670 tmp3 = load_reg(r2);
2671 potential_page_fault(s);
2672 gen_helper_mvpg(cpu_env, tmp, tmp2, tmp3);
2673 tcg_temp_free_i64(tmp);
2674 tcg_temp_free_i64(tmp2);
2675 tcg_temp_free_i64(tmp3);
2676 /* XXX check CCO bit and set CC accordingly */
2677 gen_op_movi_cc(s, 0);
2678 break;
2679 case 0x55: /* MVST R1,R2 [RRE] */
2680 tmp32_1 = load_reg32(0);
2681 tmp32_2 = tcg_const_i32(r1);
2682 tmp32_3 = tcg_const_i32(r2);
2683 potential_page_fault(s);
2684 gen_helper_mvst(cpu_env, tmp32_1, tmp32_2, tmp32_3);
2685 tcg_temp_free_i32(tmp32_1);
2686 tcg_temp_free_i32(tmp32_2);
2687 tcg_temp_free_i32(tmp32_3);
2688 gen_op_movi_cc(s, 1);
2689 break;
2690 case 0x5d: /* CLST R1,R2 [RRE] */
2691 tmp32_1 = load_reg32(0);
2692 tmp32_2 = tcg_const_i32(r1);
2693 tmp32_3 = tcg_const_i32(r2);
2694 potential_page_fault(s);
2695 gen_helper_clst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
2696 set_cc_static(s);
2697 tcg_temp_free_i32(tmp32_1);
2698 tcg_temp_free_i32(tmp32_2);
2699 tcg_temp_free_i32(tmp32_3);
2700 break;
2701 case 0x5e: /* SRST R1,R2 [RRE] */
2702 tmp32_1 = load_reg32(0);
2703 tmp32_2 = tcg_const_i32(r1);
2704 tmp32_3 = tcg_const_i32(r2);
2705 potential_page_fault(s);
2706 gen_helper_srst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
2707 set_cc_static(s);
2708 tcg_temp_free_i32(tmp32_1);
2709 tcg_temp_free_i32(tmp32_2);
2710 tcg_temp_free_i32(tmp32_3);
2711 break;
/* everything below is privileged and only exists in softmmu builds */
2713 #ifndef CONFIG_USER_ONLY
2714 case 0x02: /* STIDP D2(B2) [S] */
2715 /* Store CPU ID */
2716 check_privileged(env, s, ilc);
2717 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2718 tmp = get_address(s, 0, b2, d2);
2719 potential_page_fault(s);
2720 gen_helper_stidp(cpu_env, tmp);
2721 tcg_temp_free_i64(tmp);
2722 break;
2723 case 0x04: /* SCK D2(B2) [S] */
2724 /* Set Clock */
2725 check_privileged(env, s, ilc);
2726 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2727 tmp = get_address(s, 0, b2, d2);
2728 potential_page_fault(s);
2729 gen_helper_sck(cc_op, tmp);
2730 set_cc_static(s);
2731 tcg_temp_free_i64(tmp);
2732 break;
2733 case 0x05: /* STCK D2(B2) [S] */
2734 /* Store Clock */
2735 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2736 tmp = get_address(s, 0, b2, d2);
2737 potential_page_fault(s);
2738 gen_helper_stck(cc_op, cpu_env, tmp);
2739 set_cc_static(s);
2740 tcg_temp_free_i64(tmp);
2741 break;
2742 case 0x06: /* SCKC D2(B2) [S] */
2743 /* Set Clock Comparator */
2744 check_privileged(env, s, ilc);
2745 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2746 tmp = get_address(s, 0, b2, d2);
2747 potential_page_fault(s);
2748 gen_helper_sckc(cpu_env, tmp);
2749 tcg_temp_free_i64(tmp);
2750 break;
2751 case 0x07: /* STCKC D2(B2) [S] */
2752 /* Store Clock Comparator */
2753 check_privileged(env, s, ilc);
2754 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2755 tmp = get_address(s, 0, b2, d2);
2756 potential_page_fault(s);
2757 gen_helper_stckc(cpu_env, tmp);
2758 tcg_temp_free_i64(tmp);
2759 break;
2760 case 0x08: /* SPT D2(B2) [S] */
2761 /* Set CPU Timer */
2762 check_privileged(env, s, ilc);
2763 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2764 tmp = get_address(s, 0, b2, d2);
2765 potential_page_fault(s);
2766 gen_helper_spt(cpu_env, tmp);
2767 tcg_temp_free_i64(tmp);
2768 break;
2769 case 0x09: /* STPT D2(B2) [S] */
2770 /* Store CPU Timer */
2771 check_privileged(env, s, ilc);
2772 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2773 tmp = get_address(s, 0, b2, d2);
2774 potential_page_fault(s);
2775 gen_helper_stpt(cpu_env, tmp);
2776 tcg_temp_free_i64(tmp);
2777 break;
2778 case 0x0a: /* SPKA D2(B2) [S] */
2779 /* Set PSW Key from Address */
2780 check_privileged(env, s, ilc);
2781 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2782 tmp = get_address(s, 0, b2, d2);
2783 tmp2 = tcg_temp_new_i64();
/* replace the key field of psw_mask with bits from the address */
2784 tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
2785 tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
2786 tcg_gen_or_i64(psw_mask, tmp2, tmp);
2787 tcg_temp_free_i64(tmp2);
2788 tcg_temp_free_i64(tmp);
2789 break;
2790 case 0x0d: /* PTLB [S] */
2791 /* Purge TLB */
2792 check_privileged(env, s, ilc);
2793 gen_helper_ptlb(cpu_env);
2794 break;
2795 case 0x10: /* SPX D2(B2) [S] */
2796 /* Set Prefix Register */
2797 check_privileged(env, s, ilc);
2798 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2799 tmp = get_address(s, 0, b2, d2);
2800 potential_page_fault(s);
2801 gen_helper_spx(cpu_env, tmp);
2802 tcg_temp_free_i64(tmp);
2803 break;
2804 case 0x11: /* STPX D2(B2) [S] */
2805 /* Store Prefix */
2806 check_privileged(env, s, ilc);
2807 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2808 tmp = get_address(s, 0, b2, d2);
2809 tmp2 = tcg_temp_new_i64();
2810 tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUS390XState, psa));
2811 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2812 tcg_temp_free_i64(tmp);
2813 tcg_temp_free_i64(tmp2);
2814 break;
2815 case 0x12: /* STAP D2(B2) [S] */
2816 /* Store CPU Address */
2817 check_privileged(env, s, ilc);
2818 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2819 tmp = get_address(s, 0, b2, d2);
2820 tmp2 = tcg_temp_new_i64();
2821 tmp32_1 = tcg_temp_new_i32();
2822 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, cpu_num));
2823 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
2824 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2825 tcg_temp_free_i64(tmp);
2826 tcg_temp_free_i64(tmp2);
2827 tcg_temp_free_i32(tmp32_1);
2828 break;
2829 case 0x21: /* IPTE R1,R2 [RRE] */
2830 /* Invalidate PTE */
2831 check_privileged(env, s, ilc);
2832 r1 = (insn >> 4) & 0xf;
2833 r2 = insn & 0xf;
2834 tmp = load_reg(r1);
2835 tmp2 = load_reg(r2);
2836 gen_helper_ipte(cpu_env, tmp, tmp2);
2837 tcg_temp_free_i64(tmp);
2838 tcg_temp_free_i64(tmp2);
2839 break;
2840 case 0x29: /* ISKE R1,R2 [RRE] */
2841 /* Insert Storage Key Extended */
2842 check_privileged(env, s, ilc);
2843 r1 = (insn >> 4) & 0xf;
2844 r2 = insn & 0xf;
2845 tmp = load_reg(r2);
2846 tmp2 = tcg_temp_new_i64();
2847 gen_helper_iske(tmp2, cpu_env, tmp);
2848 store_reg(r1, tmp2);
2849 tcg_temp_free_i64(tmp);
2850 tcg_temp_free_i64(tmp2);
2851 break;
2852 case 0x2a: /* RRBE R1,R2 [RRE] */
2853 /* Reset Reference Bit Extended */
2854 check_privileged(env, s, ilc);
2855 r1 = (insn >> 4) & 0xf;
2856 r2 = insn & 0xf;
2857 tmp32_1 = load_reg32(r1);
2858 tmp = load_reg(r2);
2859 gen_helper_rrbe(cc_op, cpu_env, tmp32_1, tmp);
2860 set_cc_static(s);
2861 tcg_temp_free_i32(tmp32_1);
2862 tcg_temp_free_i64(tmp);
2863 break;
2864 case 0x2b: /* SSKE R1,R2 [RRE] */
2865 /* Set Storage Key Extended */
2866 check_privileged(env, s, ilc);
2867 r1 = (insn >> 4) & 0xf;
2868 r2 = insn & 0xf;
2869 tmp32_1 = load_reg32(r1);
2870 tmp = load_reg(r2);
2871 gen_helper_sske(cpu_env, tmp32_1, tmp);
2872 tcg_temp_free_i32(tmp32_1);
2873 tcg_temp_free_i64(tmp);
2874 break;
2875 case 0x34: /* STCH ? */
2876 /* Store Subchannel */
/* channel I/O is not implemented: just report cc 3 */
2877 check_privileged(env, s, ilc);
2878 gen_op_movi_cc(s, 3);
2879 break;
2880 case 0x46: /* STURA R1,R2 [RRE] */
2881 /* Store Using Real Address */
2882 check_privileged(env, s, ilc);
2883 r1 = (insn >> 4) & 0xf;
2884 r2 = insn & 0xf;
2885 tmp32_1 = load_reg32(r1);
2886 tmp = load_reg(r2);
2887 potential_page_fault(s);
2888 gen_helper_stura(cpu_env, tmp, tmp32_1);
2889 tcg_temp_free_i32(tmp32_1);
2890 tcg_temp_free_i64(tmp);
2891 break;
2892 case 0x50: /* CSP R1,R2 [RRE] */
2893 /* Compare And Swap And Purge */
2894 check_privileged(env, s, ilc);
2895 r1 = (insn >> 4) & 0xf;
2896 r2 = insn & 0xf;
2897 tmp32_1 = tcg_const_i32(r1);
2898 tmp32_2 = tcg_const_i32(r2);
2899 gen_helper_csp(cc_op, cpu_env, tmp32_1, tmp32_2);
2900 set_cc_static(s);
2901 tcg_temp_free_i32(tmp32_1);
2902 tcg_temp_free_i32(tmp32_2);
2903 break;
2904 case 0x5f: /* CHSC ? */
2905 /* Channel Subsystem Call */
/* channel I/O is not implemented: just report cc 3 */
2906 check_privileged(env, s, ilc);
2907 gen_op_movi_cc(s, 3);
2908 break;
2909 case 0x78: /* STCKE D2(B2) [S] */
2910 /* Store Clock Extended */
2911 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2912 tmp = get_address(s, 0, b2, d2);
2913 potential_page_fault(s);
2914 gen_helper_stcke(cc_op, cpu_env, tmp);
2915 set_cc_static(s);
2916 tcg_temp_free_i64(tmp);
2917 break;
2918 case 0x79: /* SACF D2(B2) [S] */
2919 /* Set Address Space Control Fast */
2920 check_privileged(env, s, ilc);
2921 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2922 tmp = get_address(s, 0, b2, d2);
2923 potential_page_fault(s);
2924 gen_helper_sacf(cpu_env, tmp);
2925 tcg_temp_free_i64(tmp);
2926 /* addressing mode has changed, so end the block */
2927 s->pc += ilc * 2;
2928 update_psw_addr(s);
2929 s->is_jmp = DISAS_EXCP;
2930 break;
2931 case 0x7d: /* STSI D2,(B2) [S] */
/* Store System Information; function/selector codes come in r0/r1 */
2932 check_privileged(env, s, ilc);
2933 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2934 tmp = get_address(s, 0, b2, d2);
2935 tmp32_1 = load_reg32(0);
2936 tmp32_2 = load_reg32(1);
2937 potential_page_fault(s);
2938 gen_helper_stsi(cc_op, cpu_env, tmp, tmp32_1, tmp32_2);
2939 set_cc_static(s);
2940 tcg_temp_free_i64(tmp);
2941 tcg_temp_free_i32(tmp32_1);
2942 tcg_temp_free_i32(tmp32_2);
2943 break;
2944 case 0x9d: /* LFPC D2(B2) [S] */
/* load the FP control register from memory */
2945 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2946 tmp = get_address(s, 0, b2, d2);
2947 tmp2 = tcg_temp_new_i64();
2948 tmp32_1 = tcg_temp_new_i32();
2949 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
2950 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
2951 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
2952 tcg_temp_free_i64(tmp);
2953 tcg_temp_free_i64(tmp2);
2954 tcg_temp_free_i32(tmp32_1);
2955 break;
2956 case 0xb1: /* STFL D2(B2) [S] */
2957 /* Store Facility List (CPU features) at 200 */
2958 check_privileged(env, s, ilc);
/* hard-coded facility bits, stored at the fixed low-core address 200 */
2959 tmp2 = tcg_const_i64(0xc0000000);
2960 tmp = tcg_const_i64(200);
2961 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2962 tcg_temp_free_i64(tmp2);
2963 tcg_temp_free_i64(tmp);
2964 break;
2965 case 0xb2: /* LPSWE D2(B2) [S] */
2966 /* Load PSW Extended */
2967 check_privileged(env, s, ilc);
2968 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2969 tmp = get_address(s, 0, b2, d2);
2970 tmp2 = tcg_temp_new_i64();
2971 tmp3 = tcg_temp_new_i64();
/* load the 16-byte PSW (mask + address) from memory */
2972 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
2973 tcg_gen_addi_i64(tmp, tmp, 8);
2974 tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
2975 gen_helper_load_psw(cpu_env, tmp2, tmp3);
2976 /* we need to keep cc_op intact */
2977 s->is_jmp = DISAS_JUMP;
2978 tcg_temp_free_i64(tmp);
2979 tcg_temp_free_i64(tmp2);
2980 tcg_temp_free_i64(tmp3);
2981 break;
2982 case 0x20: /* SERVC R1,R2 [RRE] */
2983 /* SCLP Service call (PV hypercall) */
2984 check_privileged(env, s, ilc);
2985 potential_page_fault(s);
2986 tmp32_1 = load_reg32(r2);
2987 tmp = load_reg(r1);
2988 gen_helper_servc(cc_op, cpu_env, tmp32_1, tmp);
2989 set_cc_static(s);
2990 tcg_temp_free_i32(tmp32_1);
2991 tcg_temp_free_i64(tmp);
2992 break;
2993 #endif
2994 default:
2995 LOG_DISAS("illegal b2 operation 0x%x\n", op);
2996 gen_illegal_opcode(env, s, ilc);
2997 break;
/*
 * disas_b3: decode the 0xb3 opcode group — binary floating point
 * operations in RRE/RRF format.  For RRF forms, m3 carries the third
 * register field (see the per-case comments); elsewhere m3 is the
 * rounding-mode mask passed to the conversion helpers.
 */
3001 static void disas_b3(CPUS390XState *env, DisasContext *s, int op, int m3,
3002 int r1, int r2)
3004 TCGv_i64 tmp;
3005 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3006 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
/* Emit a call to an FP helper taking (env, r1, r2) with no CC result. */
3007 #define FP_HELPER(i) \
3008 tmp32_1 = tcg_const_i32(r1); \
3009 tmp32_2 = tcg_const_i32(r2); \
3010 gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
3011 tcg_temp_free_i32(tmp32_1); \
3012 tcg_temp_free_i32(tmp32_2);
/* Same, but the helper also returns the condition code in cc_op. */
3014 #define FP_HELPER_CC(i) \
3015 tmp32_1 = tcg_const_i32(r1); \
3016 tmp32_2 = tcg_const_i32(r2); \
3017 gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
3018 set_cc_static(s); \
3019 tcg_temp_free_i32(tmp32_1); \
3020 tcg_temp_free_i32(tmp32_2);
3022 switch (op) {
3023 case 0x0: /* LPEBR R1,R2 [RRE] */
3024 FP_HELPER_CC(lpebr);
3025 break;
3026 case 0x2: /* LTEBR R1,R2 [RRE] */
3027 FP_HELPER_CC(ltebr);
3028 break;
3029 case 0x3: /* LCEBR R1,R2 [RRE] */
3030 FP_HELPER_CC(lcebr);
3031 break;
3032 case 0x4: /* LDEBR R1,R2 [RRE] */
3033 FP_HELPER(ldebr);
3034 break;
3035 case 0x5: /* LXDBR R1,R2 [RRE] */
3036 FP_HELPER(lxdbr);
3037 break;
3038 case 0x9: /* CEBR R1,R2 [RRE] */
3039 FP_HELPER_CC(cebr);
3040 break;
3041 case 0xa: /* AEBR R1,R2 [RRE] */
3042 FP_HELPER_CC(aebr);
3043 break;
3044 case 0xb: /* SEBR R1,R2 [RRE] */
3045 FP_HELPER_CC(sebr);
3046 break;
3047 case 0xd: /* DEBR R1,R2 [RRE] */
3048 FP_HELPER(debr);
3049 break;
3050 case 0x10: /* LPDBR R1,R2 [RRE] */
3051 FP_HELPER_CC(lpdbr);
3052 break;
3053 case 0x12: /* LTDBR R1,R2 [RRE] */
3054 FP_HELPER_CC(ltdbr);
3055 break;
3056 case 0x13: /* LCDBR R1,R2 [RRE] */
3057 FP_HELPER_CC(lcdbr);
3058 break;
3059 case 0x15: /* SQDBR R1,R2 [RRE] */
3060 FP_HELPER(sqdbr);
3061 break;
3062 case 0x17: /* MEEBR R1,R2 [RRE] */
3063 FP_HELPER(meebr);
3064 break;
3065 case 0x19: /* CDBR R1,R2 [RRE] */
3066 FP_HELPER_CC(cdbr);
3067 break;
3068 case 0x1a: /* ADBR R1,R2 [RRE] */
3069 FP_HELPER_CC(adbr);
3070 break;
3071 case 0x1b: /* SDBR R1,R2 [RRE] */
3072 FP_HELPER_CC(sdbr);
3073 break;
3074 case 0x1c: /* MDBR R1,R2 [RRE] */
3075 FP_HELPER(mdbr);
3076 break;
3077 case 0x1d: /* DDBR R1,R2 [RRE] */
3078 FP_HELPER(ddbr);
3079 break;
3080 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3081 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3082 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3083 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3084 tmp32_1 = tcg_const_i32(m3);
3085 tmp32_2 = tcg_const_i32(r2);
3086 tmp32_3 = tcg_const_i32(r1);
3087 switch (op) {
3088 case 0xe:
3089 gen_helper_maebr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
3090 break;
3091 case 0x1e:
3092 gen_helper_madbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
3093 break;
3094 case 0x1f:
3095 gen_helper_msdbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
3096 break;
3097 default:
3098 tcg_abort();
3100 tcg_temp_free_i32(tmp32_1);
3101 tcg_temp_free_i32(tmp32_2);
3102 tcg_temp_free_i32(tmp32_3);
3103 break;
3104 case 0x40: /* LPXBR R1,R2 [RRE] */
3105 FP_HELPER_CC(lpxbr);
3106 break;
3107 case 0x42: /* LTXBR R1,R2 [RRE] */
3108 FP_HELPER_CC(ltxbr);
3109 break;
3110 case 0x43: /* LCXBR R1,R2 [RRE] */
3111 FP_HELPER_CC(lcxbr);
3112 break;
3113 case 0x44: /* LEDBR R1,R2 [RRE] */
3114 FP_HELPER(ledbr);
3115 break;
3116 case 0x45: /* LDXBR R1,R2 [RRE] */
3117 FP_HELPER(ldxbr);
3118 break;
3119 case 0x46: /* LEXBR R1,R2 [RRE] */
3120 FP_HELPER(lexbr);
3121 break;
3122 case 0x49: /* CXBR R1,R2 [RRE] */
3123 FP_HELPER_CC(cxbr);
3124 break;
3125 case 0x4a: /* AXBR R1,R2 [RRE] */
3126 FP_HELPER_CC(axbr);
3127 break;
3128 case 0x4b: /* SXBR R1,R2 [RRE] */
3129 FP_HELPER_CC(sxbr);
3130 break;
3131 case 0x4c: /* MXBR R1,R2 [RRE] */
3132 FP_HELPER(mxbr);
3133 break;
3134 case 0x4d: /* DXBR R1,R2 [RRE] */
3135 FP_HELPER(dxbr);
3136 break;
3137 case 0x65: /* LXR R1,R2 [RRE] */
/* 128-bit FP registers occupy a pair; copy both halves (r2/r2+2) */
3138 tmp = load_freg(r2);
3139 store_freg(r1, tmp);
3140 tcg_temp_free_i64(tmp);
3141 tmp = load_freg(r2 + 2);
3142 store_freg(r1 + 2, tmp);
3143 tcg_temp_free_i64(tmp);
3144 break;
3145 case 0x74: /* LZER R1 [RRE] */
3146 tmp32_1 = tcg_const_i32(r1);
3147 gen_helper_lzer(cpu_env, tmp32_1);
3148 tcg_temp_free_i32(tmp32_1);
3149 break;
3150 case 0x75: /* LZDR R1 [RRE] */
3151 tmp32_1 = tcg_const_i32(r1);
3152 gen_helper_lzdr(cpu_env, tmp32_1);
3153 tcg_temp_free_i32(tmp32_1);
3154 break;
3155 case 0x76: /* LZXR R1 [RRE] */
3156 tmp32_1 = tcg_const_i32(r1);
3157 gen_helper_lzxr(cpu_env, tmp32_1);
3158 tcg_temp_free_i32(tmp32_1);
3159 break;
3160 case 0x84: /* SFPC R1 [RRE] */
/* set the FP control register from general register r1 */
3161 tmp32_1 = load_reg32(r1);
3162 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
3163 tcg_temp_free_i32(tmp32_1);
3164 break;
3165 case 0x8c: /* EFPC R1 [RRE] */
/* extract the FP control register into general register r1 */
3166 tmp32_1 = tcg_temp_new_i32();
3167 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
3168 store_reg32(r1, tmp32_1);
3169 tcg_temp_free_i32(tmp32_1);
3170 break;
/* convert from 32-bit fixed to short/long/extended BFP */
3171 case 0x94: /* CEFBR R1,R2 [RRE] */
3172 case 0x95: /* CDFBR R1,R2 [RRE] */
3173 case 0x96: /* CXFBR R1,R2 [RRE] */
3174 tmp32_1 = tcg_const_i32(r1);
3175 tmp32_2 = load_reg32(r2);
3176 switch (op) {
3177 case 0x94:
3178 gen_helper_cefbr(cpu_env, tmp32_1, tmp32_2);
3179 break;
3180 case 0x95:
3181 gen_helper_cdfbr(cpu_env, tmp32_1, tmp32_2);
3182 break;
3183 case 0x96:
3184 gen_helper_cxfbr(cpu_env, tmp32_1, tmp32_2);
3185 break;
3186 default:
3187 tcg_abort();
3189 tcg_temp_free_i32(tmp32_1);
3190 tcg_temp_free_i32(tmp32_2);
3191 break;
/* convert BFP to 32-bit fixed; m3 is the rounding-mode mask */
3192 case 0x98: /* CFEBR R1,R2 [RRE] */
3193 case 0x99: /* CFDBR R1,R2 [RRE] */
3194 case 0x9a: /* CFXBR R1,R2 [RRE] */
3195 tmp32_1 = tcg_const_i32(r1);
3196 tmp32_2 = tcg_const_i32(r2);
3197 tmp32_3 = tcg_const_i32(m3);
3198 switch (op) {
3199 case 0x98:
3200 gen_helper_cfebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3201 break;
3202 case 0x99:
3203 gen_helper_cfdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3204 break;
3205 case 0x9a:
3206 gen_helper_cfxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3207 break;
3208 default:
3209 tcg_abort();
3211 set_cc_static(s);
3212 tcg_temp_free_i32(tmp32_1);
3213 tcg_temp_free_i32(tmp32_2);
3214 tcg_temp_free_i32(tmp32_3);
3215 break;
/* convert from 64-bit fixed to short/long BFP */
3216 case 0xa4: /* CEGBR R1,R2 [RRE] */
3217 case 0xa5: /* CDGBR R1,R2 [RRE] */
3218 tmp32_1 = tcg_const_i32(r1);
3219 tmp = load_reg(r2);
3220 switch (op) {
3221 case 0xa4:
3222 gen_helper_cegbr(cpu_env, tmp32_1, tmp);
3223 break;
3224 case 0xa5:
3225 gen_helper_cdgbr(cpu_env, tmp32_1, tmp);
3226 break;
3227 default:
3228 tcg_abort();
3230 tcg_temp_free_i32(tmp32_1);
3231 tcg_temp_free_i64(tmp);
3232 break;
3233 case 0xa6: /* CXGBR R1,R2 [RRE] */
3234 tmp32_1 = tcg_const_i32(r1);
3235 tmp = load_reg(r2);
3236 gen_helper_cxgbr(cpu_env, tmp32_1, tmp);
3237 tcg_temp_free_i32(tmp32_1);
3238 tcg_temp_free_i64(tmp);
3239 break;
/* convert BFP to 64-bit fixed; m3 is the rounding-mode mask */
3240 case 0xa8: /* CGEBR R1,R2 [RRE] */
3241 tmp32_1 = tcg_const_i32(r1);
3242 tmp32_2 = tcg_const_i32(r2);
3243 tmp32_3 = tcg_const_i32(m3);
3244 gen_helper_cgebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3245 set_cc_static(s);
3246 tcg_temp_free_i32(tmp32_1);
3247 tcg_temp_free_i32(tmp32_2);
3248 tcg_temp_free_i32(tmp32_3);
3249 break;
3250 case 0xa9: /* CGDBR R1,R2 [RRE] */
3251 tmp32_1 = tcg_const_i32(r1);
3252 tmp32_2 = tcg_const_i32(r2);
3253 tmp32_3 = tcg_const_i32(m3);
3254 gen_helper_cgdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3255 set_cc_static(s);
3256 tcg_temp_free_i32(tmp32_1);
3257 tcg_temp_free_i32(tmp32_2);
3258 tcg_temp_free_i32(tmp32_3);
3259 break;
3260 case 0xaa: /* CGXBR R1,R2 [RRE] */
3261 tmp32_1 = tcg_const_i32(r1);
3262 tmp32_2 = tcg_const_i32(r2);
3263 tmp32_3 = tcg_const_i32(m3);
3264 gen_helper_cgxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
3265 set_cc_static(s);
3266 tcg_temp_free_i32(tmp32_1);
3267 tcg_temp_free_i32(tmp32_2);
3268 tcg_temp_free_i32(tmp32_3);
3269 break;
3270 default:
3271 LOG_DISAS("illegal b3 operation 0x%x\n", op);
3272 gen_illegal_opcode(env, s, 2);
3273 break;
3276 #undef FP_HELPER_CC
3277 #undef FP_HELPER
3280 static void disas_b9(CPUS390XState *env, DisasContext *s, int op, int r1,
3281 int r2)
3283 TCGv_i64 tmp, tmp2, tmp3;
3284 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3286 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
3287 switch (op) {
3288 case 0x0: /* LPGR R1,R2 [RRE] */
3289 case 0x1: /* LNGR R1,R2 [RRE] */
3290 case 0x2: /* LTGR R1,R2 [RRE] */
3291 case 0x3: /* LCGR R1,R2 [RRE] */
3292 case 0x10: /* LPGFR R1,R2 [RRE] */
3293 case 0x11: /* LNFGR R1,R2 [RRE] */
3294 case 0x12: /* LTGFR R1,R2 [RRE] */
3295 case 0x13: /* LCGFR R1,R2 [RRE] */
3296 if (op & 0x10) {
3297 tmp = load_reg32_i64(r2);
3298 } else {
3299 tmp = load_reg(r2);
3301 switch (op & 0xf) {
3302 case 0x0: /* LP?GR */
3303 set_cc_abs64(s, tmp);
3304 gen_helper_abs_i64(tmp, tmp);
3305 store_reg(r1, tmp);
3306 break;
3307 case 0x1: /* LN?GR */
3308 set_cc_nabs64(s, tmp);
3309 gen_helper_nabs_i64(tmp, tmp);
3310 store_reg(r1, tmp);
3311 break;
3312 case 0x2: /* LT?GR */
3313 if (r1 != r2) {
3314 store_reg(r1, tmp);
3316 set_cc_s64(s, tmp);
3317 break;
3318 case 0x3: /* LC?GR */
3319 tcg_gen_neg_i64(regs[r1], tmp);
3320 set_cc_comp64(s, regs[r1]);
3321 break;
3323 tcg_temp_free_i64(tmp);
3324 break;
3325 case 0x4: /* LGR R1,R2 [RRE] */
3326 store_reg(r1, regs[r2]);
3327 break;
3328 case 0x6: /* LGBR R1,R2 [RRE] */
3329 tmp2 = load_reg(r2);
3330 tcg_gen_ext8s_i64(tmp2, tmp2);
3331 store_reg(r1, tmp2);
3332 tcg_temp_free_i64(tmp2);
3333 break;
3334 case 0x8: /* AGR R1,R2 [RRE] */
3335 case 0xa: /* ALGR R1,R2 [RRE] */
3336 tmp = load_reg(r1);
3337 tmp2 = load_reg(r2);
3338 tmp3 = tcg_temp_new_i64();
3339 tcg_gen_add_i64(tmp3, tmp, tmp2);
3340 store_reg(r1, tmp3);
3341 switch (op) {
3342 case 0x8:
3343 set_cc_add64(s, tmp, tmp2, tmp3);
3344 break;
3345 case 0xa:
3346 set_cc_addu64(s, tmp, tmp2, tmp3);
3347 break;
3348 default:
3349 tcg_abort();
3351 tcg_temp_free_i64(tmp);
3352 tcg_temp_free_i64(tmp2);
3353 tcg_temp_free_i64(tmp3);
3354 break;
3355 case 0x9: /* SGR R1,R2 [RRE] */
3356 case 0xb: /* SLGR R1,R2 [RRE] */
3357 case 0x1b: /* SLGFR R1,R2 [RRE] */
3358 case 0x19: /* SGFR R1,R2 [RRE] */
3359 tmp = load_reg(r1);
3360 switch (op) {
3361 case 0x1b:
3362 tmp32_1 = load_reg32(r2);
3363 tmp2 = tcg_temp_new_i64();
3364 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3365 tcg_temp_free_i32(tmp32_1);
3366 break;
3367 case 0x19:
3368 tmp32_1 = load_reg32(r2);
3369 tmp2 = tcg_temp_new_i64();
3370 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3371 tcg_temp_free_i32(tmp32_1);
3372 break;
3373 default:
3374 tmp2 = load_reg(r2);
3375 break;
3377 tmp3 = tcg_temp_new_i64();
3378 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3379 store_reg(r1, tmp3);
3380 switch (op) {
3381 case 0x9:
3382 case 0x19:
3383 set_cc_sub64(s, tmp, tmp2, tmp3);
3384 break;
3385 case 0xb:
3386 case 0x1b:
3387 set_cc_subu64(s, tmp, tmp2, tmp3);
3388 break;
3389 default:
3390 tcg_abort();
3392 tcg_temp_free_i64(tmp);
3393 tcg_temp_free_i64(tmp2);
3394 tcg_temp_free_i64(tmp3);
3395 break;
3396 case 0xc: /* MSGR R1,R2 [RRE] */
3397 case 0x1c: /* MSGFR R1,R2 [RRE] */
3398 tmp = load_reg(r1);
3399 tmp2 = load_reg(r2);
3400 if (op == 0x1c) {
3401 tcg_gen_ext32s_i64(tmp2, tmp2);
3403 tcg_gen_mul_i64(tmp, tmp, tmp2);
3404 store_reg(r1, tmp);
3405 tcg_temp_free_i64(tmp);
3406 tcg_temp_free_i64(tmp2);
3407 break;
3408 case 0xd: /* DSGR R1,R2 [RRE] */
3409 case 0x1d: /* DSGFR R1,R2 [RRE] */
3410 tmp = load_reg(r1 + 1);
3411 if (op == 0xd) {
3412 tmp2 = load_reg(r2);
3413 } else {
3414 tmp32_1 = load_reg32(r2);
3415 tmp2 = tcg_temp_new_i64();
3416 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3417 tcg_temp_free_i32(tmp32_1);
3419 tmp3 = tcg_temp_new_i64();
3420 tcg_gen_div_i64(tmp3, tmp, tmp2);
3421 store_reg(r1 + 1, tmp3);
3422 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3423 store_reg(r1, tmp3);
3424 tcg_temp_free_i64(tmp);
3425 tcg_temp_free_i64(tmp2);
3426 tcg_temp_free_i64(tmp3);
3427 break;
3428 case 0x14: /* LGFR R1,R2 [RRE] */
3429 tmp32_1 = load_reg32(r2);
3430 tmp = tcg_temp_new_i64();
3431 tcg_gen_ext_i32_i64(tmp, tmp32_1);
3432 store_reg(r1, tmp);
3433 tcg_temp_free_i32(tmp32_1);
3434 tcg_temp_free_i64(tmp);
3435 break;
3436 case 0x16: /* LLGFR R1,R2 [RRE] */
3437 tmp32_1 = load_reg32(r2);
3438 tmp = tcg_temp_new_i64();
3439 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3440 store_reg(r1, tmp);
3441 tcg_temp_free_i32(tmp32_1);
3442 tcg_temp_free_i64(tmp);
3443 break;
3444 case 0x17: /* LLGTR R1,R2 [RRE] */
3445 tmp32_1 = load_reg32(r2);
3446 tmp = tcg_temp_new_i64();
3447 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
3448 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3449 store_reg(r1, tmp);
3450 tcg_temp_free_i32(tmp32_1);
3451 tcg_temp_free_i64(tmp);
3452 break;
3453 case 0x18: /* AGFR R1,R2 [RRE] */
3454 case 0x1a: /* ALGFR R1,R2 [RRE] */
3455 tmp32_1 = load_reg32(r2);
3456 tmp2 = tcg_temp_new_i64();
3457 if (op == 0x18) {
3458 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3459 } else {
3460 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3462 tcg_temp_free_i32(tmp32_1);
3463 tmp = load_reg(r1);
3464 tmp3 = tcg_temp_new_i64();
3465 tcg_gen_add_i64(tmp3, tmp, tmp2);
3466 store_reg(r1, tmp3);
3467 if (op == 0x18) {
3468 set_cc_add64(s, tmp, tmp2, tmp3);
3469 } else {
3470 set_cc_addu64(s, tmp, tmp2, tmp3);
3472 tcg_temp_free_i64(tmp);
3473 tcg_temp_free_i64(tmp2);
3474 tcg_temp_free_i64(tmp3);
3475 break;
3476 case 0x0f: /* LRVGR R1,R2 [RRE] */
3477 tcg_gen_bswap64_i64(regs[r1], regs[r2]);
3478 break;
3479 case 0x1f: /* LRVR R1,R2 [RRE] */
3480 tmp32_1 = load_reg32(r2);
3481 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
3482 store_reg32(r1, tmp32_1);
3483 tcg_temp_free_i32(tmp32_1);
3484 break;
3485 case 0x20: /* CGR R1,R2 [RRE] */
3486 case 0x30: /* CGFR R1,R2 [RRE] */
3487 tmp2 = load_reg(r2);
3488 if (op == 0x30) {
3489 tcg_gen_ext32s_i64(tmp2, tmp2);
3491 tmp = load_reg(r1);
3492 cmp_s64(s, tmp, tmp2);
3493 tcg_temp_free_i64(tmp);
3494 tcg_temp_free_i64(tmp2);
3495 break;
3496 case 0x21: /* CLGR R1,R2 [RRE] */
3497 case 0x31: /* CLGFR R1,R2 [RRE] */
3498 tmp2 = load_reg(r2);
3499 if (op == 0x31) {
3500 tcg_gen_ext32u_i64(tmp2, tmp2);
3502 tmp = load_reg(r1);
3503 cmp_u64(s, tmp, tmp2);
3504 tcg_temp_free_i64(tmp);
3505 tcg_temp_free_i64(tmp2);
3506 break;
3507 case 0x26: /* LBR R1,R2 [RRE] */
3508 tmp32_1 = load_reg32(r2);
3509 tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
3510 store_reg32(r1, tmp32_1);
3511 tcg_temp_free_i32(tmp32_1);
3512 break;
3513 case 0x27: /* LHR R1,R2 [RRE] */
3514 tmp32_1 = load_reg32(r2);
3515 tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
3516 store_reg32(r1, tmp32_1);
3517 tcg_temp_free_i32(tmp32_1);
3518 break;
3519 case 0x80: /* NGR R1,R2 [RRE] */
3520 case 0x81: /* OGR R1,R2 [RRE] */
3521 case 0x82: /* XGR R1,R2 [RRE] */
3522 tmp = load_reg(r1);
3523 tmp2 = load_reg(r2);
3524 switch (op) {
3525 case 0x80:
3526 tcg_gen_and_i64(tmp, tmp, tmp2);
3527 break;
3528 case 0x81:
3529 tcg_gen_or_i64(tmp, tmp, tmp2);
3530 break;
3531 case 0x82:
3532 tcg_gen_xor_i64(tmp, tmp, tmp2);
3533 break;
3534 default:
3535 tcg_abort();
3537 store_reg(r1, tmp);
3538 set_cc_nz_u64(s, tmp);
3539 tcg_temp_free_i64(tmp);
3540 tcg_temp_free_i64(tmp2);
3541 break;
3542 case 0x83: /* FLOGR R1,R2 [RRE] */
3543 tmp = load_reg(r2);
3544 tmp32_1 = tcg_const_i32(r1);
3545 gen_helper_flogr(cc_op, cpu_env, tmp32_1, tmp);
3546 set_cc_static(s);
3547 tcg_temp_free_i64(tmp);
3548 tcg_temp_free_i32(tmp32_1);
3549 break;
3550 case 0x84: /* LLGCR R1,R2 [RRE] */
3551 tmp = load_reg(r2);
3552 tcg_gen_andi_i64(tmp, tmp, 0xff);
3553 store_reg(r1, tmp);
3554 tcg_temp_free_i64(tmp);
3555 break;
3556 case 0x85: /* LLGHR R1,R2 [RRE] */
3557 tmp = load_reg(r2);
3558 tcg_gen_andi_i64(tmp, tmp, 0xffff);
3559 store_reg(r1, tmp);
3560 tcg_temp_free_i64(tmp);
3561 break;
3562 case 0x87: /* DLGR R1,R2 [RRE] */
3563 tmp32_1 = tcg_const_i32(r1);
3564 tmp = load_reg(r2);
3565 gen_helper_dlg(cpu_env, tmp32_1, tmp);
3566 tcg_temp_free_i64(tmp);
3567 tcg_temp_free_i32(tmp32_1);
3568 break;
3569 case 0x88: /* ALCGR R1,R2 [RRE] */
3570 tmp = load_reg(r1);
3571 tmp2 = load_reg(r2);
3572 tmp3 = tcg_temp_new_i64();
3573 gen_op_calc_cc(s);
3574 tcg_gen_extu_i32_i64(tmp3, cc_op);
3575 tcg_gen_shri_i64(tmp3, tmp3, 1);
3576 tcg_gen_andi_i64(tmp3, tmp3, 1);
3577 tcg_gen_add_i64(tmp3, tmp2, tmp3);
3578 tcg_gen_add_i64(tmp3, tmp, tmp3);
3579 store_reg(r1, tmp3);
3580 set_cc_addu64(s, tmp, tmp2, tmp3);
3581 tcg_temp_free_i64(tmp);
3582 tcg_temp_free_i64(tmp2);
3583 tcg_temp_free_i64(tmp3);
3584 break;
3585 case 0x89: /* SLBGR R1,R2 [RRE] */
3586 tmp = load_reg(r1);
3587 tmp2 = load_reg(r2);
3588 tmp32_1 = tcg_const_i32(r1);
3589 gen_op_calc_cc(s);
3590 gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, tmp, tmp2);
3591 set_cc_static(s);
3592 tcg_temp_free_i64(tmp);
3593 tcg_temp_free_i64(tmp2);
3594 tcg_temp_free_i32(tmp32_1);
3595 break;
3596 case 0x94: /* LLCR R1,R2 [RRE] */
3597 tmp32_1 = load_reg32(r2);
3598 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
3599 store_reg32(r1, tmp32_1);
3600 tcg_temp_free_i32(tmp32_1);
3601 break;
3602 case 0x95: /* LLHR R1,R2 [RRE] */
3603 tmp32_1 = load_reg32(r2);
3604 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
3605 store_reg32(r1, tmp32_1);
3606 tcg_temp_free_i32(tmp32_1);
3607 break;
3608 case 0x96: /* MLR R1,R2 [RRE] */
3609 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3610 tmp2 = load_reg(r2);
3611 tmp3 = load_reg((r1 + 1) & 15);
3612 tcg_gen_ext32u_i64(tmp2, tmp2);
3613 tcg_gen_ext32u_i64(tmp3, tmp3);
3614 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3615 store_reg32_i64((r1 + 1) & 15, tmp2);
3616 tcg_gen_shri_i64(tmp2, tmp2, 32);
3617 store_reg32_i64(r1, tmp2);
3618 tcg_temp_free_i64(tmp2);
3619 tcg_temp_free_i64(tmp3);
3620 break;
3621 case 0x97: /* DLR R1,R2 [RRE] */
3622 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3623 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3624 tmp = load_reg(r1);
3625 tmp2 = load_reg(r2);
3626 tmp3 = load_reg((r1 + 1) & 15);
3627 tcg_gen_ext32u_i64(tmp2, tmp2);
3628 tcg_gen_ext32u_i64(tmp3, tmp3);
3629 tcg_gen_shli_i64(tmp, tmp, 32);
3630 tcg_gen_or_i64(tmp, tmp, tmp3);
3632 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3633 tcg_gen_div_i64(tmp, tmp, tmp2);
3634 store_reg32_i64((r1 + 1) & 15, tmp);
3635 store_reg32_i64(r1, tmp3);
3636 tcg_temp_free_i64(tmp);
3637 tcg_temp_free_i64(tmp2);
3638 tcg_temp_free_i64(tmp3);
3639 break;
3640 case 0x98: /* ALCR R1,R2 [RRE] */
3641 tmp32_1 = load_reg32(r1);
3642 tmp32_2 = load_reg32(r2);
3643 tmp32_3 = tcg_temp_new_i32();
3644 /* XXX possible optimization point */
3645 gen_op_calc_cc(s);
3646 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
3647 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3648 store_reg32(r1, tmp32_3);
3649 tcg_temp_free_i32(tmp32_1);
3650 tcg_temp_free_i32(tmp32_2);
3651 tcg_temp_free_i32(tmp32_3);
3652 break;
3653 case 0x99: /* SLBR R1,R2 [RRE] */
3654 tmp32_1 = load_reg32(r2);
3655 tmp32_2 = tcg_const_i32(r1);
3656 gen_op_calc_cc(s);
3657 gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_2, tmp32_1);
3658 set_cc_static(s);
3659 tcg_temp_free_i32(tmp32_1);
3660 tcg_temp_free_i32(tmp32_2);
3661 break;
3662 default:
3663 LOG_DISAS("illegal b9 operation 0x%x\n", op);
3664 gen_illegal_opcode(env, s, 2);
3665 break;
3669 static void disas_c0(CPUS390XState *env, DisasContext *s, int op, int r1, int i2)
3671 TCGv_i64 tmp;
3672 TCGv_i32 tmp32_1, tmp32_2;
3673 uint64_t target = s->pc + i2 * 2LL;
3674 int l1;
3676 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);
3678 switch (op) {
3679 case 0: /* larl r1, i2 */
3680 tmp = tcg_const_i64(target);
3681 store_reg(r1, tmp);
3682 tcg_temp_free_i64(tmp);
3683 break;
3684 case 0x1: /* LGFI R1,I2 [RIL] */
3685 tmp = tcg_const_i64((int64_t)i2);
3686 store_reg(r1, tmp);
3687 tcg_temp_free_i64(tmp);
3688 break;
3689 case 0x4: /* BRCL M1,I2 [RIL] */
3690 /* m1 & (1 << (3 - cc)) */
3691 tmp32_1 = tcg_const_i32(3);
3692 tmp32_2 = tcg_const_i32(1);
3693 gen_op_calc_cc(s);
3694 tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
3695 tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
3696 tcg_temp_free_i32(tmp32_1);
3697 tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
3698 tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
3699 l1 = gen_new_label();
3700 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
3701 gen_goto_tb(s, 0, target);
3702 gen_set_label(l1);
3703 gen_goto_tb(s, 1, s->pc + 6);
3704 s->is_jmp = DISAS_TB_JUMP;
3705 tcg_temp_free_i32(tmp32_1);
3706 tcg_temp_free_i32(tmp32_2);
3707 break;
3708 case 0x5: /* brasl r1, i2 */
3709 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
3710 store_reg(r1, tmp);
3711 tcg_temp_free_i64(tmp);
3712 gen_goto_tb(s, 0, target);
3713 s->is_jmp = DISAS_TB_JUMP;
3714 break;
3715 case 0x7: /* XILF R1,I2 [RIL] */
3716 case 0xb: /* NILF R1,I2 [RIL] */
3717 case 0xd: /* OILF R1,I2 [RIL] */
3718 tmp32_1 = load_reg32(r1);
3719 switch (op) {
3720 case 0x7:
3721 tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3722 break;
3723 case 0xb:
3724 tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3725 break;
3726 case 0xd:
3727 tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3728 break;
3729 default:
3730 tcg_abort();
3732 store_reg32(r1, tmp32_1);
3733 set_cc_nz_u32(s, tmp32_1);
3734 tcg_temp_free_i32(tmp32_1);
3735 break;
3736 case 0x9: /* IILF R1,I2 [RIL] */
3737 tmp32_1 = tcg_const_i32((uint32_t)i2);
3738 store_reg32(r1, tmp32_1);
3739 tcg_temp_free_i32(tmp32_1);
3740 break;
3741 case 0xa: /* NIHF R1,I2 [RIL] */
3742 tmp = load_reg(r1);
3743 tmp32_1 = tcg_temp_new_i32();
3744 tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
3745 | 0xffffffffULL);
3746 store_reg(r1, tmp);
3747 tcg_gen_shri_i64(tmp, tmp, 32);
3748 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3749 set_cc_nz_u32(s, tmp32_1);
3750 tcg_temp_free_i64(tmp);
3751 tcg_temp_free_i32(tmp32_1);
3752 break;
3753 case 0xe: /* LLIHF R1,I2 [RIL] */
3754 tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
3755 store_reg(r1, tmp);
3756 tcg_temp_free_i64(tmp);
3757 break;
3758 case 0xf: /* LLILF R1,I2 [RIL] */
3759 tmp = tcg_const_i64((uint32_t)i2);
3760 store_reg(r1, tmp);
3761 tcg_temp_free_i64(tmp);
3762 break;
3763 default:
3764 LOG_DISAS("illegal c0 operation 0x%x\n", op);
3765 gen_illegal_opcode(env, s, 3);
3766 break;
3770 static void disas_c2(CPUS390XState *env, DisasContext *s, int op, int r1,
3771 int i2)
3773 TCGv_i64 tmp, tmp2, tmp3;
3774 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3776 switch (op) {
3777 case 0x4: /* SLGFI R1,I2 [RIL] */
3778 case 0xa: /* ALGFI R1,I2 [RIL] */
3779 tmp = load_reg(r1);
3780 tmp2 = tcg_const_i64((uint64_t)(uint32_t)i2);
3781 tmp3 = tcg_temp_new_i64();
3782 switch (op) {
3783 case 0x4:
3784 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3785 set_cc_subu64(s, tmp, tmp2, tmp3);
3786 break;
3787 case 0xa:
3788 tcg_gen_add_i64(tmp3, tmp, tmp2);
3789 set_cc_addu64(s, tmp, tmp2, tmp3);
3790 break;
3791 default:
3792 tcg_abort();
3794 store_reg(r1, tmp3);
3795 tcg_temp_free_i64(tmp);
3796 tcg_temp_free_i64(tmp2);
3797 tcg_temp_free_i64(tmp3);
3798 break;
3799 case 0x5: /* SLFI R1,I2 [RIL] */
3800 case 0xb: /* ALFI R1,I2 [RIL] */
3801 tmp32_1 = load_reg32(r1);
3802 tmp32_2 = tcg_const_i32(i2);
3803 tmp32_3 = tcg_temp_new_i32();
3804 switch (op) {
3805 case 0x5:
3806 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
3807 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
3808 break;
3809 case 0xb:
3810 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
3811 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3812 break;
3813 default:
3814 tcg_abort();
3816 store_reg32(r1, tmp32_3);
3817 tcg_temp_free_i32(tmp32_1);
3818 tcg_temp_free_i32(tmp32_2);
3819 tcg_temp_free_i32(tmp32_3);
3820 break;
3821 case 0xc: /* CGFI R1,I2 [RIL] */
3822 tmp = load_reg(r1);
3823 cmp_s64c(s, tmp, (int64_t)i2);
3824 tcg_temp_free_i64(tmp);
3825 break;
3826 case 0xe: /* CLGFI R1,I2 [RIL] */
3827 tmp = load_reg(r1);
3828 cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
3829 tcg_temp_free_i64(tmp);
3830 break;
3831 case 0xd: /* CFI R1,I2 [RIL] */
3832 tmp32_1 = load_reg32(r1);
3833 cmp_s32c(s, tmp32_1, i2);
3834 tcg_temp_free_i32(tmp32_1);
3835 break;
3836 case 0xf: /* CLFI R1,I2 [RIL] */
3837 tmp32_1 = load_reg32(r1);
3838 cmp_u32c(s, tmp32_1, i2);
3839 tcg_temp_free_i32(tmp32_1);
3840 break;
3841 default:
3842 LOG_DISAS("illegal c2 operation 0x%x\n", op);
3843 gen_illegal_opcode(env, s, 3);
3844 break;
3848 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3850 switch (opc & 0xf) {
3851 case 0x4:
3852 tcg_gen_and_i32(tmp, tmp, tmp2);
3853 break;
3854 case 0x6:
3855 tcg_gen_or_i32(tmp, tmp, tmp2);
3856 break;
3857 case 0x7:
3858 tcg_gen_xor_i32(tmp, tmp, tmp2);
3859 break;
3860 default:
3861 tcg_abort();
3865 static void disas_s390_insn(CPUS390XState *env, DisasContext *s)
3867 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3868 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3869 unsigned char opc;
3870 uint64_t insn;
3871 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3872 TCGv_i32 vl;
3873 int ilc;
3874 int l1;
3876 opc = cpu_ldub_code(env, s->pc);
3877 LOG_DISAS("opc 0x%x\n", opc);
3879 ilc = get_ilc(opc);
3881 switch (opc) {
3882 #ifndef CONFIG_USER_ONLY
3883 case 0x01: /* SAM */
3884 insn = ld_code2(env, s->pc);
3885 /* set addressing mode, but we only do 64bit anyways */
3886 break;
3887 #endif
3888 case 0x6: /* BCTR R1,R2 [RR] */
3889 insn = ld_code2(env, s->pc);
3890 decode_rr(s, insn, &r1, &r2);
3891 tmp32_1 = load_reg32(r1);
3892 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3893 store_reg32(r1, tmp32_1);
3895 if (r2) {
3896 gen_update_cc_op(s);
3897 l1 = gen_new_label();
3898 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3900 /* not taking the branch, jump to after the instruction */
3901 gen_goto_tb(s, 0, s->pc + 2);
3902 gen_set_label(l1);
3904 /* take the branch, move R2 into psw.addr */
3905 tmp32_1 = load_reg32(r2);
3906 tmp = tcg_temp_new_i64();
3907 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3908 tcg_gen_mov_i64(psw_addr, tmp);
3909 s->is_jmp = DISAS_JUMP;
3910 tcg_temp_free_i32(tmp32_1);
3911 tcg_temp_free_i64(tmp);
3913 break;
3914 case 0x7: /* BCR M1,R2 [RR] */
3915 insn = ld_code2(env, s->pc);
3916 decode_rr(s, insn, &r1, &r2);
3917 if (r2) {
3918 tmp = load_reg(r2);
3919 gen_bcr(s, r1, tmp, s->pc);
3920 tcg_temp_free_i64(tmp);
3921 s->is_jmp = DISAS_TB_JUMP;
3922 } else {
3923 /* XXX: "serialization and checkpoint-synchronization function"? */
3925 break;
3926 case 0xa: /* SVC I [RR] */
3927 insn = ld_code2(env, s->pc);
3928 debug_insn(insn);
3929 i = insn & 0xff;
3930 update_psw_addr(s);
3931 gen_op_calc_cc(s);
3932 tmp32_1 = tcg_const_i32(i);
3933 tmp32_2 = tcg_const_i32(ilc * 2);
3934 tmp32_3 = tcg_const_i32(EXCP_SVC);
3935 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, int_svc_code));
3936 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUS390XState, int_svc_ilc));
3937 gen_helper_exception(cpu_env, tmp32_3);
3938 s->is_jmp = DISAS_EXCP;
3939 tcg_temp_free_i32(tmp32_1);
3940 tcg_temp_free_i32(tmp32_2);
3941 tcg_temp_free_i32(tmp32_3);
3942 break;
3943 case 0xd: /* BASR R1,R2 [RR] */
3944 insn = ld_code2(env, s->pc);
3945 decode_rr(s, insn, &r1, &r2);
3946 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3947 store_reg(r1, tmp);
3948 if (r2) {
3949 tmp2 = load_reg(r2);
3950 tcg_gen_mov_i64(psw_addr, tmp2);
3951 tcg_temp_free_i64(tmp2);
3952 s->is_jmp = DISAS_JUMP;
3954 tcg_temp_free_i64(tmp);
3955 break;
3956 case 0xe: /* MVCL R1,R2 [RR] */
3957 insn = ld_code2(env, s->pc);
3958 decode_rr(s, insn, &r1, &r2);
3959 tmp32_1 = tcg_const_i32(r1);
3960 tmp32_2 = tcg_const_i32(r2);
3961 potential_page_fault(s);
3962 gen_helper_mvcl(cc_op, cpu_env, tmp32_1, tmp32_2);
3963 set_cc_static(s);
3964 tcg_temp_free_i32(tmp32_1);
3965 tcg_temp_free_i32(tmp32_2);
3966 break;
3967 case 0x10: /* LPR R1,R2 [RR] */
3968 insn = ld_code2(env, s->pc);
3969 decode_rr(s, insn, &r1, &r2);
3970 tmp32_1 = load_reg32(r2);
3971 set_cc_abs32(s, tmp32_1);
3972 gen_helper_abs_i32(tmp32_1, tmp32_1);
3973 store_reg32(r1, tmp32_1);
3974 tcg_temp_free_i32(tmp32_1);
3975 break;
3976 case 0x11: /* LNR R1,R2 [RR] */
3977 insn = ld_code2(env, s->pc);
3978 decode_rr(s, insn, &r1, &r2);
3979 tmp32_1 = load_reg32(r2);
3980 set_cc_nabs32(s, tmp32_1);
3981 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3982 store_reg32(r1, tmp32_1);
3983 tcg_temp_free_i32(tmp32_1);
3984 break;
3985 case 0x12: /* LTR R1,R2 [RR] */
3986 insn = ld_code2(env, s->pc);
3987 decode_rr(s, insn, &r1, &r2);
3988 tmp32_1 = load_reg32(r2);
3989 if (r1 != r2) {
3990 store_reg32(r1, tmp32_1);
3992 set_cc_s32(s, tmp32_1);
3993 tcg_temp_free_i32(tmp32_1);
3994 break;
3995 case 0x13: /* LCR R1,R2 [RR] */
3996 insn = ld_code2(env, s->pc);
3997 decode_rr(s, insn, &r1, &r2);
3998 tmp32_1 = load_reg32(r2);
3999 tcg_gen_neg_i32(tmp32_1, tmp32_1);
4000 store_reg32(r1, tmp32_1);
4001 set_cc_comp32(s, tmp32_1);
4002 tcg_temp_free_i32(tmp32_1);
4003 break;
4004 case 0x14: /* NR R1,R2 [RR] */
4005 case 0x16: /* OR R1,R2 [RR] */
4006 case 0x17: /* XR R1,R2 [RR] */
4007 insn = ld_code2(env, s->pc);
4008 decode_rr(s, insn, &r1, &r2);
4009 tmp32_2 = load_reg32(r2);
4010 tmp32_1 = load_reg32(r1);
4011 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4012 store_reg32(r1, tmp32_1);
4013 set_cc_nz_u32(s, tmp32_1);
4014 tcg_temp_free_i32(tmp32_1);
4015 tcg_temp_free_i32(tmp32_2);
4016 break;
4017 case 0x18: /* LR R1,R2 [RR] */
4018 insn = ld_code2(env, s->pc);
4019 decode_rr(s, insn, &r1, &r2);
4020 tmp32_1 = load_reg32(r2);
4021 store_reg32(r1, tmp32_1);
4022 tcg_temp_free_i32(tmp32_1);
4023 break;
4024 case 0x15: /* CLR R1,R2 [RR] */
4025 case 0x19: /* CR R1,R2 [RR] */
4026 insn = ld_code2(env, s->pc);
4027 decode_rr(s, insn, &r1, &r2);
4028 tmp32_1 = load_reg32(r1);
4029 tmp32_2 = load_reg32(r2);
4030 if (opc == 0x15) {
4031 cmp_u32(s, tmp32_1, tmp32_2);
4032 } else {
4033 cmp_s32(s, tmp32_1, tmp32_2);
4035 tcg_temp_free_i32(tmp32_1);
4036 tcg_temp_free_i32(tmp32_2);
4037 break;
4038 case 0x1a: /* AR R1,R2 [RR] */
4039 case 0x1e: /* ALR R1,R2 [RR] */
4040 insn = ld_code2(env, s->pc);
4041 decode_rr(s, insn, &r1, &r2);
4042 tmp32_1 = load_reg32(r1);
4043 tmp32_2 = load_reg32(r2);
4044 tmp32_3 = tcg_temp_new_i32();
4045 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4046 store_reg32(r1, tmp32_3);
4047 if (opc == 0x1a) {
4048 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4049 } else {
4050 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4052 tcg_temp_free_i32(tmp32_1);
4053 tcg_temp_free_i32(tmp32_2);
4054 tcg_temp_free_i32(tmp32_3);
4055 break;
4056 case 0x1b: /* SR R1,R2 [RR] */
4057 case 0x1f: /* SLR R1,R2 [RR] */
4058 insn = ld_code2(env, s->pc);
4059 decode_rr(s, insn, &r1, &r2);
4060 tmp32_1 = load_reg32(r1);
4061 tmp32_2 = load_reg32(r2);
4062 tmp32_3 = tcg_temp_new_i32();
4063 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4064 store_reg32(r1, tmp32_3);
4065 if (opc == 0x1b) {
4066 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4067 } else {
4068 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4070 tcg_temp_free_i32(tmp32_1);
4071 tcg_temp_free_i32(tmp32_2);
4072 tcg_temp_free_i32(tmp32_3);
4073 break;
4074 case 0x1c: /* MR R1,R2 [RR] */
4075 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4076 insn = ld_code2(env, s->pc);
4077 decode_rr(s, insn, &r1, &r2);
4078 tmp2 = load_reg(r2);
4079 tmp3 = load_reg((r1 + 1) & 15);
4080 tcg_gen_ext32s_i64(tmp2, tmp2);
4081 tcg_gen_ext32s_i64(tmp3, tmp3);
4082 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4083 store_reg32_i64((r1 + 1) & 15, tmp2);
4084 tcg_gen_shri_i64(tmp2, tmp2, 32);
4085 store_reg32_i64(r1, tmp2);
4086 tcg_temp_free_i64(tmp2);
4087 tcg_temp_free_i64(tmp3);
4088 break;
4089 case 0x1d: /* DR R1,R2 [RR] */
4090 insn = ld_code2(env, s->pc);
4091 decode_rr(s, insn, &r1, &r2);
4092 tmp32_1 = load_reg32(r1);
4093 tmp32_2 = load_reg32(r1 + 1);
4094 tmp32_3 = load_reg32(r2);
4096 tmp = tcg_temp_new_i64(); /* dividend */
4097 tmp2 = tcg_temp_new_i64(); /* divisor */
4098 tmp3 = tcg_temp_new_i64();
4100 /* dividend is r(r1 << 32) | r(r1 + 1) */
4101 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4102 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4103 tcg_gen_shli_i64(tmp, tmp, 32);
4104 tcg_gen_or_i64(tmp, tmp, tmp2);
4106 /* divisor is r(r2) */
4107 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
4109 tcg_gen_div_i64(tmp3, tmp, tmp2);
4110 tcg_gen_rem_i64(tmp, tmp, tmp2);
4112 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4113 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4115 store_reg32(r1, tmp32_1); /* remainder */
4116 store_reg32(r1 + 1, tmp32_2); /* quotient */
4117 tcg_temp_free_i32(tmp32_1);
4118 tcg_temp_free_i32(tmp32_2);
4119 tcg_temp_free_i32(tmp32_3);
4120 tcg_temp_free_i64(tmp);
4121 tcg_temp_free_i64(tmp2);
4122 tcg_temp_free_i64(tmp3);
4123 break;
4124 case 0x28: /* LDR R1,R2 [RR] */
4125 insn = ld_code2(env, s->pc);
4126 decode_rr(s, insn, &r1, &r2);
4127 tmp = load_freg(r2);
4128 store_freg(r1, tmp);
4129 tcg_temp_free_i64(tmp);
4130 break;
4131 case 0x38: /* LER R1,R2 [RR] */
4132 insn = ld_code2(env, s->pc);
4133 decode_rr(s, insn, &r1, &r2);
4134 tmp32_1 = load_freg32(r2);
4135 store_freg32(r1, tmp32_1);
4136 tcg_temp_free_i32(tmp32_1);
4137 break;
4138 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4139 insn = ld_code4(env, s->pc);
4140 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4141 tmp2 = load_reg(r1);
4142 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
4143 tcg_temp_free_i64(tmp);
4144 tcg_temp_free_i64(tmp2);
4145 break;
4146 case 0x41: /* la */
4147 insn = ld_code4(env, s->pc);
4148 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4149 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
4150 tcg_temp_free_i64(tmp);
4151 break;
4152 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4153 insn = ld_code4(env, s->pc);
4154 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4155 tmp2 = load_reg(r1);
4156 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4157 tcg_temp_free_i64(tmp);
4158 tcg_temp_free_i64(tmp2);
4159 break;
4160 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4161 insn = ld_code4(env, s->pc);
4162 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4163 tmp2 = tcg_temp_new_i64();
4164 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4165 store_reg8(r1, tmp2);
4166 tcg_temp_free_i64(tmp);
4167 tcg_temp_free_i64(tmp2);
4168 break;
4169 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4170 insn = ld_code4(env, s->pc);
4171 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4172 tmp2 = load_reg(r1);
4173 tmp3 = tcg_const_i64(s->pc + 4);
4174 update_psw_addr(s);
4175 gen_op_calc_cc(s);
4176 gen_helper_ex(cc_op, cpu_env, cc_op, tmp2, tmp, tmp3);
4177 set_cc_static(s);
4178 tcg_temp_free_i64(tmp);
4179 tcg_temp_free_i64(tmp2);
4180 tcg_temp_free_i64(tmp3);
4181 break;
4182 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4183 insn = ld_code4(env, s->pc);
4184 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4185 tcg_temp_free_i64(tmp);
4187 tmp32_1 = load_reg32(r1);
4188 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
4189 store_reg32(r1, tmp32_1);
4191 gen_update_cc_op(s);
4192 l1 = gen_new_label();
4193 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
4195 /* not taking the branch, jump to after the instruction */
4196 gen_goto_tb(s, 0, s->pc + 4);
4197 gen_set_label(l1);
4199 /* take the branch, move R2 into psw.addr */
4200 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4201 tcg_gen_mov_i64(psw_addr, tmp);
4202 s->is_jmp = DISAS_JUMP;
4203 tcg_temp_free_i32(tmp32_1);
4204 tcg_temp_free_i64(tmp);
4205 break;
4206 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4207 insn = ld_code4(env, s->pc);
4208 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4209 gen_bcr(s, r1, tmp, s->pc + 4);
4210 tcg_temp_free_i64(tmp);
4211 s->is_jmp = DISAS_TB_JUMP;
4212 break;
4213 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4214 insn = ld_code4(env, s->pc);
4215 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4216 tmp2 = tcg_temp_new_i64();
4217 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4218 store_reg32_i64(r1, tmp2);
4219 tcg_temp_free_i64(tmp);
4220 tcg_temp_free_i64(tmp2);
4221 break;
4222 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4223 insn = ld_code4(env, s->pc);
4224 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4225 tmp32_1 = load_reg32(r1);
4226 tmp32_2 = tcg_temp_new_i32();
4227 tmp2 = tcg_temp_new_i64();
4228 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4229 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4230 cmp_s32(s, tmp32_1, tmp32_2);
4231 tcg_temp_free_i32(tmp32_1);
4232 tcg_temp_free_i32(tmp32_2);
4233 tcg_temp_free_i64(tmp);
4234 tcg_temp_free_i64(tmp2);
4235 break;
4236 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4237 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4238 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4239 insn = ld_code4(env, s->pc);
4240 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4241 tmp2 = tcg_temp_new_i64();
4242 tmp32_1 = load_reg32(r1);
4243 tmp32_2 = tcg_temp_new_i32();
4244 tmp32_3 = tcg_temp_new_i32();
4246 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4247 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4248 switch (opc) {
4249 case 0x4a:
4250 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4251 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4252 break;
4253 case 0x4b:
4254 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4255 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4256 break;
4257 case 0x4c:
4258 tcg_gen_mul_i32(tmp32_3, tmp32_1, tmp32_2);
4259 break;
4260 default:
4261 tcg_abort();
4263 store_reg32(r1, tmp32_3);
4265 tcg_temp_free_i32(tmp32_1);
4266 tcg_temp_free_i32(tmp32_2);
4267 tcg_temp_free_i32(tmp32_3);
4268 tcg_temp_free_i64(tmp);
4269 tcg_temp_free_i64(tmp2);
4270 break;
4271 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4272 insn = ld_code4(env, s->pc);
4273 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4274 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
4275 store_reg(r1, tmp2);
4276 tcg_gen_mov_i64(psw_addr, tmp);
4277 tcg_temp_free_i64(tmp);
4278 tcg_temp_free_i64(tmp2);
4279 s->is_jmp = DISAS_JUMP;
4280 break;
4281 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4282 insn = ld_code4(env, s->pc);
4283 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4284 tmp2 = tcg_temp_new_i64();
4285 tmp32_1 = tcg_temp_new_i32();
4286 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
4287 gen_helper_cvd(tmp2, tmp32_1);
4288 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4289 tcg_temp_free_i64(tmp);
4290 tcg_temp_free_i64(tmp2);
4291 tcg_temp_free_i32(tmp32_1);
4292 break;
4293 case 0x50: /* st r1, d2(x2, b2) */
4294 insn = ld_code4(env, s->pc);
4295 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4296 tmp2 = load_reg(r1);
4297 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4298 tcg_temp_free_i64(tmp);
4299 tcg_temp_free_i64(tmp2);
4300 break;
4301 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4302 insn = ld_code4(env, s->pc);
4303 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4304 tmp2 = tcg_temp_new_i64();
4305 tmp32_1 = tcg_temp_new_i32();
4306 tmp32_2 = load_reg32(r1);
4307 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4308 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4309 cmp_u32(s, tmp32_2, tmp32_1);
4310 tcg_temp_free_i64(tmp);
4311 tcg_temp_free_i64(tmp2);
4312 tcg_temp_free_i32(tmp32_1);
4313 tcg_temp_free_i32(tmp32_2);
4314 break;
4315 case 0x54: /* N R1,D2(X2,B2) [RX] */
4316 case 0x56: /* O R1,D2(X2,B2) [RX] */
4317 case 0x57: /* X R1,D2(X2,B2) [RX] */
4318 insn = ld_code4(env, s->pc);
4319 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4320 tmp2 = tcg_temp_new_i64();
4321 tmp32_1 = load_reg32(r1);
4322 tmp32_2 = tcg_temp_new_i32();
4323 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4324 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4325 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4326 store_reg32(r1, tmp32_1);
4327 set_cc_nz_u32(s, tmp32_1);
4328 tcg_temp_free_i64(tmp);
4329 tcg_temp_free_i64(tmp2);
4330 tcg_temp_free_i32(tmp32_1);
4331 tcg_temp_free_i32(tmp32_2);
4332 break;
4333 case 0x58: /* l r1, d2(x2, b2) */
4334 insn = ld_code4(env, s->pc);
4335 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4336 tmp2 = tcg_temp_new_i64();
4337 tmp32_1 = tcg_temp_new_i32();
4338 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4339 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4340 store_reg32(r1, tmp32_1);
4341 tcg_temp_free_i64(tmp);
4342 tcg_temp_free_i64(tmp2);
4343 tcg_temp_free_i32(tmp32_1);
4344 break;
4345 case 0x59: /* C R1,D2(X2,B2) [RX] */
4346 insn = ld_code4(env, s->pc);
4347 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4348 tmp2 = tcg_temp_new_i64();
4349 tmp32_1 = tcg_temp_new_i32();
4350 tmp32_2 = load_reg32(r1);
4351 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4352 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4353 cmp_s32(s, tmp32_2, tmp32_1);
4354 tcg_temp_free_i64(tmp);
4355 tcg_temp_free_i64(tmp2);
4356 tcg_temp_free_i32(tmp32_1);
4357 tcg_temp_free_i32(tmp32_2);
4358 break;
4359 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4360 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4361 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4362 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4363 insn = ld_code4(env, s->pc);
4364 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4365 tmp32_1 = load_reg32(r1);
4366 tmp32_2 = tcg_temp_new_i32();
4367 tmp32_3 = tcg_temp_new_i32();
4368 tcg_gen_qemu_ld32s(tmp, tmp, get_mem_index(s));
4369 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4370 switch (opc) {
4371 case 0x5a:
4372 case 0x5e:
4373 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4374 break;
4375 case 0x5b:
4376 case 0x5f:
4377 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4378 break;
4379 default:
4380 tcg_abort();
4382 store_reg32(r1, tmp32_3);
4383 switch (opc) {
4384 case 0x5a:
4385 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4386 break;
4387 case 0x5e:
4388 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4389 break;
4390 case 0x5b:
4391 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4392 break;
4393 case 0x5f:
4394 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4395 break;
4396 default:
4397 tcg_abort();
4399 tcg_temp_free_i64(tmp);
4400 tcg_temp_free_i32(tmp32_1);
4401 tcg_temp_free_i32(tmp32_2);
4402 tcg_temp_free_i32(tmp32_3);
4403 break;
4404 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4405 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4406 insn = ld_code4(env, s->pc);
4407 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4408 tmp2 = tcg_temp_new_i64();
4409 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4410 tmp3 = load_reg((r1 + 1) & 15);
4411 tcg_gen_ext32s_i64(tmp2, tmp2);
4412 tcg_gen_ext32s_i64(tmp3, tmp3);
4413 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4414 store_reg32_i64((r1 + 1) & 15, tmp2);
4415 tcg_gen_shri_i64(tmp2, tmp2, 32);
4416 store_reg32_i64(r1, tmp2);
4417 tcg_temp_free_i64(tmp);
4418 tcg_temp_free_i64(tmp2);
4419 tcg_temp_free_i64(tmp3);
4420 break;
4421 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4422 insn = ld_code4(env, s->pc);
4423 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4424 tmp32_1 = load_reg32(r1);
4425 tmp32_2 = load_reg32(r1 + 1);
4427 tmp = tcg_temp_new_i64();
4428 tmp2 = tcg_temp_new_i64();
4430 /* dividend is r(r1 << 32) | r(r1 + 1) */
4431 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4432 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4433 tcg_gen_shli_i64(tmp, tmp, 32);
4434 tcg_gen_or_i64(tmp, tmp, tmp2);
4436 /* divisor is in memory */
4437 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
4439 /* XXX divisor == 0 -> FixP divide exception */
4441 tcg_gen_div_i64(tmp3, tmp, tmp2);
4442 tcg_gen_rem_i64(tmp, tmp, tmp2);
4444 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4445 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4447 store_reg32(r1, tmp32_1); /* remainder */
4448 store_reg32(r1 + 1, tmp32_2); /* quotient */
4449 tcg_temp_free_i32(tmp32_1);
4450 tcg_temp_free_i32(tmp32_2);
4451 tcg_temp_free_i64(tmp);
4452 tcg_temp_free_i64(tmp2);
4453 tcg_temp_free_i64(tmp3);
4454 break;
4455 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4456 insn = ld_code4(env, s->pc);
4457 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4458 tmp2 = load_freg(r1);
4459 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4460 tcg_temp_free_i64(tmp);
4461 tcg_temp_free_i64(tmp2);
4462 break;
4463 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4464 insn = ld_code4(env, s->pc);
4465 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4466 tmp2 = tcg_temp_new_i64();
4467 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
4468 store_freg(r1, tmp2);
4469 tcg_temp_free_i64(tmp);
4470 tcg_temp_free_i64(tmp2);
4471 break;
4472 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4473 insn = ld_code4(env, s->pc);
4474 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4475 tmp2 = tcg_temp_new_i64();
4476 tmp32_1 = load_freg32(r1);
4477 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
4478 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4479 tcg_temp_free_i64(tmp);
4480 tcg_temp_free_i64(tmp2);
4481 tcg_temp_free_i32(tmp32_1);
4482 break;
4483 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4484 insn = ld_code4(env, s->pc);
4485 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4486 tmp2 = tcg_temp_new_i64();
4487 tmp32_1 = load_reg32(r1);
4488 tmp32_2 = tcg_temp_new_i32();
4489 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4490 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4491 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
4492 store_reg32(r1, tmp32_1);
4493 tcg_temp_free_i64(tmp);
4494 tcg_temp_free_i64(tmp2);
4495 tcg_temp_free_i32(tmp32_1);
4496 tcg_temp_free_i32(tmp32_2);
4497 break;
4498 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4499 insn = ld_code4(env, s->pc);
4500 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4501 tmp2 = tcg_temp_new_i64();
4502 tmp32_1 = tcg_temp_new_i32();
4503 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4504 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4505 store_freg32(r1, tmp32_1);
4506 tcg_temp_free_i64(tmp);
4507 tcg_temp_free_i64(tmp2);
4508 tcg_temp_free_i32(tmp32_1);
4509 break;
4510 #ifndef CONFIG_USER_ONLY
4511 case 0x80: /* SSM D2(B2) [S] */
4512 /* Set System Mask */
4513 check_privileged(env, s, ilc);
4514 insn = ld_code4(env, s->pc);
4515 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4516 tmp = get_address(s, 0, b2, d2);
4517 tmp2 = tcg_temp_new_i64();
4518 tmp3 = tcg_temp_new_i64();
4519 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
4520 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4521 tcg_gen_shli_i64(tmp2, tmp2, 56);
4522 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
4523 tcg_temp_free_i64(tmp);
4524 tcg_temp_free_i64(tmp2);
4525 tcg_temp_free_i64(tmp3);
4526 break;
4527 case 0x82: /* LPSW D2(B2) [S] */
4528 /* Load PSW */
4529 check_privileged(env, s, ilc);
4530 insn = ld_code4(env, s->pc);
4531 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4532 tmp = get_address(s, 0, b2, d2);
4533 tmp2 = tcg_temp_new_i64();
4534 tmp3 = tcg_temp_new_i64();
4535 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4536 tcg_gen_addi_i64(tmp, tmp, 4);
4537 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
4538 gen_helper_load_psw(cpu_env, tmp2, tmp3);
4539 tcg_temp_free_i64(tmp);
4540 tcg_temp_free_i64(tmp2);
4541 tcg_temp_free_i64(tmp3);
4542 /* we need to keep cc_op intact */
4543 s->is_jmp = DISAS_JUMP;
4544 break;
4545 case 0x83: /* DIAG R1,R3,D2 [RS] */
4546 /* Diagnose call (KVM hypercall) */
4547 check_privileged(env, s, ilc);
4548 potential_page_fault(s);
4549 insn = ld_code4(env, s->pc);
4550 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4551 tmp32_1 = tcg_const_i32(insn & 0xfff);
4552 tmp2 = load_reg(2);
4553 tmp3 = load_reg(1);
4554 gen_helper_diag(tmp2, cpu_env, tmp32_1, tmp2, tmp3);
4555 store_reg(2, tmp2);
4556 tcg_temp_free_i32(tmp32_1);
4557 tcg_temp_free_i64(tmp2);
4558 tcg_temp_free_i64(tmp3);
4559 break;
4560 #endif
4561 case 0x88: /* SRL R1,D2(B2) [RS] */
4562 case 0x89: /* SLL R1,D2(B2) [RS] */
4563 case 0x8a: /* SRA R1,D2(B2) [RS] */
4564 insn = ld_code4(env, s->pc);
4565 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4566 tmp = get_address(s, 0, b2, d2);
4567 tmp32_1 = load_reg32(r1);
4568 tmp32_2 = tcg_temp_new_i32();
4569 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4570 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4571 switch (opc) {
4572 case 0x88:
4573 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4574 break;
4575 case 0x89:
4576 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4577 break;
4578 case 0x8a:
4579 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4580 set_cc_s32(s, tmp32_1);
4581 break;
4582 default:
4583 tcg_abort();
4585 store_reg32(r1, tmp32_1);
4586 tcg_temp_free_i64(tmp);
4587 tcg_temp_free_i32(tmp32_1);
4588 tcg_temp_free_i32(tmp32_2);
4589 break;
4590 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4591 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4592 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4593 insn = ld_code4(env, s->pc);
4594 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4595 tmp = get_address(s, 0, b2, d2); /* shift */
4596 tmp2 = tcg_temp_new_i64();
4597 tmp32_1 = load_reg32(r1);
4598 tmp32_2 = load_reg32(r1 + 1);
4599 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4600 switch (opc) {
4601 case 0x8c:
4602 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4603 break;
4604 case 0x8d:
4605 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4606 break;
4607 case 0x8e:
4608 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4609 set_cc_s64(s, tmp2);
4610 break;
4612 tcg_gen_shri_i64(tmp, tmp2, 32);
4613 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4614 store_reg32(r1, tmp32_1);
4615 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4616 store_reg32(r1 + 1, tmp32_2);
4617 tcg_temp_free_i64(tmp);
4618 tcg_temp_free_i64(tmp2);
4619 break;
4620 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4621 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4622 insn = ld_code4(env, s->pc);
4623 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4625 tmp = get_address(s, 0, b2, d2);
4626 tmp2 = tcg_temp_new_i64();
4627 tmp3 = tcg_const_i64(4);
4628 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
4629 for (i = r1;; i = (i + 1) % 16) {
4630 if (opc == 0x98) {
4631 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4632 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4633 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4634 } else {
4635 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4637 if (i == r3) {
4638 break;
4640 tcg_gen_add_i64(tmp, tmp, tmp3);
4642 tcg_temp_free_i64(tmp);
4643 tcg_temp_free_i64(tmp2);
4644 tcg_temp_free_i64(tmp3);
4645 tcg_temp_free_i64(tmp4);
4646 break;
4647 case 0x91: /* TM D1(B1),I2 [SI] */
4648 insn = ld_code4(env, s->pc);
4649 tmp = decode_si(s, insn, &i2, &b1, &d1);
4650 tmp2 = tcg_const_i64(i2);
4651 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4652 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4653 tcg_temp_free_i64(tmp);
4654 tcg_temp_free_i64(tmp2);
4655 break;
4656 case 0x92: /* MVI D1(B1),I2 [SI] */
4657 insn = ld_code4(env, s->pc);
4658 tmp = decode_si(s, insn, &i2, &b1, &d1);
4659 tmp2 = tcg_const_i64(i2);
4660 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4661 tcg_temp_free_i64(tmp);
4662 tcg_temp_free_i64(tmp2);
4663 break;
4664 case 0x94: /* NI D1(B1),I2 [SI] */
4665 case 0x96: /* OI D1(B1),I2 [SI] */
4666 case 0x97: /* XI D1(B1),I2 [SI] */
4667 insn = ld_code4(env, s->pc);
4668 tmp = decode_si(s, insn, &i2, &b1, &d1);
4669 tmp2 = tcg_temp_new_i64();
4670 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4671 switch (opc) {
4672 case 0x94:
4673 tcg_gen_andi_i64(tmp2, tmp2, i2);
4674 break;
4675 case 0x96:
4676 tcg_gen_ori_i64(tmp2, tmp2, i2);
4677 break;
4678 case 0x97:
4679 tcg_gen_xori_i64(tmp2, tmp2, i2);
4680 break;
4681 default:
4682 tcg_abort();
4684 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4685 set_cc_nz_u64(s, tmp2);
4686 tcg_temp_free_i64(tmp);
4687 tcg_temp_free_i64(tmp2);
4688 break;
4689 case 0x95: /* CLI D1(B1),I2 [SI] */
4690 insn = ld_code4(env, s->pc);
4691 tmp = decode_si(s, insn, &i2, &b1, &d1);
4692 tmp2 = tcg_temp_new_i64();
4693 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4694 cmp_u64c(s, tmp2, i2);
4695 tcg_temp_free_i64(tmp);
4696 tcg_temp_free_i64(tmp2);
4697 break;
4698 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4699 insn = ld_code4(env, s->pc);
4700 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4701 tmp = get_address(s, 0, b2, d2);
4702 tmp32_1 = tcg_const_i32(r1);
4703 tmp32_2 = tcg_const_i32(r3);
4704 potential_page_fault(s);
4705 gen_helper_lam(cpu_env, tmp32_1, tmp, tmp32_2);
4706 tcg_temp_free_i64(tmp);
4707 tcg_temp_free_i32(tmp32_1);
4708 tcg_temp_free_i32(tmp32_2);
4709 break;
4710 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4711 insn = ld_code4(env, s->pc);
4712 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4713 tmp = get_address(s, 0, b2, d2);
4714 tmp32_1 = tcg_const_i32(r1);
4715 tmp32_2 = tcg_const_i32(r3);
4716 potential_page_fault(s);
4717 gen_helper_stam(cpu_env, tmp32_1, tmp, tmp32_2);
4718 tcg_temp_free_i64(tmp);
4719 tcg_temp_free_i32(tmp32_1);
4720 tcg_temp_free_i32(tmp32_2);
4721 break;
4722 case 0xa5:
4723 insn = ld_code4(env, s->pc);
4724 r1 = (insn >> 20) & 0xf;
4725 op = (insn >> 16) & 0xf;
4726 i2 = insn & 0xffff;
4727 disas_a5(env, s, op, r1, i2);
4728 break;
4729 case 0xa7:
4730 insn = ld_code4(env, s->pc);
4731 r1 = (insn >> 20) & 0xf;
4732 op = (insn >> 16) & 0xf;
4733 i2 = (short)insn;
4734 disas_a7(env, s, op, r1, i2);
4735 break;
4736 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4737 insn = ld_code4(env, s->pc);
4738 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4739 tmp = get_address(s, 0, b2, d2);
4740 tmp32_1 = tcg_const_i32(r1);
4741 tmp32_2 = tcg_const_i32(r3);
4742 potential_page_fault(s);
4743 gen_helper_mvcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4744 set_cc_static(s);
4745 tcg_temp_free_i64(tmp);
4746 tcg_temp_free_i32(tmp32_1);
4747 tcg_temp_free_i32(tmp32_2);
4748 break;
4749 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4750 insn = ld_code4(env, s->pc);
4751 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4752 tmp = get_address(s, 0, b2, d2);
4753 tmp32_1 = tcg_const_i32(r1);
4754 tmp32_2 = tcg_const_i32(r3);
4755 potential_page_fault(s);
4756 gen_helper_clcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4757 set_cc_static(s);
4758 tcg_temp_free_i64(tmp);
4759 tcg_temp_free_i32(tmp32_1);
4760 tcg_temp_free_i32(tmp32_2);
4761 break;
4762 #ifndef CONFIG_USER_ONLY
4763 case 0xac: /* STNSM D1(B1),I2 [SI] */
4764 case 0xad: /* STOSM D1(B1),I2 [SI] */
4765 check_privileged(env, s, ilc);
4766 insn = ld_code4(env, s->pc);
4767 tmp = decode_si(s, insn, &i2, &b1, &d1);
4768 tmp2 = tcg_temp_new_i64();
4769 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4770 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4771 if (opc == 0xac) {
4772 tcg_gen_andi_i64(psw_mask, psw_mask,
4773 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4774 } else {
4775 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4777 tcg_temp_free_i64(tmp);
4778 tcg_temp_free_i64(tmp2);
4779 break;
4780 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4781 check_privileged(env, s, ilc);
4782 insn = ld_code4(env, s->pc);
4783 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4784 tmp = get_address(s, 0, b2, d2);
4785 tmp2 = load_reg(r3);
4786 tmp32_1 = tcg_const_i32(r1);
4787 potential_page_fault(s);
4788 gen_helper_sigp(cc_op, cpu_env, tmp, tmp32_1, tmp2);
4789 set_cc_static(s);
4790 tcg_temp_free_i64(tmp);
4791 tcg_temp_free_i64(tmp2);
4792 tcg_temp_free_i32(tmp32_1);
4793 break;
4794 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4795 check_privileged(env, s, ilc);
4796 insn = ld_code4(env, s->pc);
4797 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4798 tmp32_1 = tcg_const_i32(r1);
4799 potential_page_fault(s);
4800 gen_helper_lra(cc_op, cpu_env, tmp, tmp32_1);
4801 set_cc_static(s);
4802 tcg_temp_free_i64(tmp);
4803 tcg_temp_free_i32(tmp32_1);
4804 break;
4805 #endif
4806 case 0xb2:
4807 insn = ld_code4(env, s->pc);
4808 op = (insn >> 16) & 0xff;
4809 switch (op) {
4810 case 0x9c: /* STFPC D2(B2) [S] */
4811 d2 = insn & 0xfff;
4812 b2 = (insn >> 12) & 0xf;
4813 tmp32_1 = tcg_temp_new_i32();
4814 tmp = tcg_temp_new_i64();
4815 tmp2 = get_address(s, 0, b2, d2);
4816 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
4817 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4818 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4819 tcg_temp_free_i32(tmp32_1);
4820 tcg_temp_free_i64(tmp);
4821 tcg_temp_free_i64(tmp2);
4822 break;
4823 default:
4824 disas_b2(env, s, op, insn);
4825 break;
4827 break;
4828 case 0xb3:
4829 insn = ld_code4(env, s->pc);
4830 op = (insn >> 16) & 0xff;
4831 r3 = (insn >> 12) & 0xf; /* aka m3 */
4832 r1 = (insn >> 4) & 0xf;
4833 r2 = insn & 0xf;
4834 disas_b3(env, s, op, r3, r1, r2);
4835 break;
4836 #ifndef CONFIG_USER_ONLY
4837 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4838 /* Store Control */
4839 check_privileged(env, s, ilc);
4840 insn = ld_code4(env, s->pc);
4841 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4842 tmp = get_address(s, 0, b2, d2);
4843 tmp32_1 = tcg_const_i32(r1);
4844 tmp32_2 = tcg_const_i32(r3);
4845 potential_page_fault(s);
4846 gen_helper_stctl(cpu_env, tmp32_1, tmp, tmp32_2);
4847 tcg_temp_free_i64(tmp);
4848 tcg_temp_free_i32(tmp32_1);
4849 tcg_temp_free_i32(tmp32_2);
4850 break;
4851 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4852 /* Load Control */
4853 check_privileged(env, s, ilc);
4854 insn = ld_code4(env, s->pc);
4855 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4856 tmp = get_address(s, 0, b2, d2);
4857 tmp32_1 = tcg_const_i32(r1);
4858 tmp32_2 = tcg_const_i32(r3);
4859 potential_page_fault(s);
4860 gen_helper_lctl(cpu_env, tmp32_1, tmp, tmp32_2);
4861 tcg_temp_free_i64(tmp);
4862 tcg_temp_free_i32(tmp32_1);
4863 tcg_temp_free_i32(tmp32_2);
4864 break;
4865 #endif
4866 case 0xb9:
4867 insn = ld_code4(env, s->pc);
4868 r1 = (insn >> 4) & 0xf;
4869 r2 = insn & 0xf;
4870 op = (insn >> 16) & 0xff;
4871 disas_b9(env, s, op, r1, r2);
4872 break;
4873 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4874 insn = ld_code4(env, s->pc);
4875 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4876 tmp = get_address(s, 0, b2, d2);
4877 tmp32_1 = tcg_const_i32(r1);
4878 tmp32_2 = tcg_const_i32(r3);
4879 potential_page_fault(s);
4880 gen_helper_cs(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4881 set_cc_static(s);
4882 tcg_temp_free_i64(tmp);
4883 tcg_temp_free_i32(tmp32_1);
4884 tcg_temp_free_i32(tmp32_2);
4885 break;
4886 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4887 insn = ld_code4(env, s->pc);
4888 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4889 tmp = get_address(s, 0, b2, d2);
4890 tmp32_1 = load_reg32(r1);
4891 tmp32_2 = tcg_const_i32(r3);
4892 potential_page_fault(s);
4893 gen_helper_clm(cc_op, cpu_env, tmp32_1, tmp32_2, tmp);
4894 set_cc_static(s);
4895 tcg_temp_free_i64(tmp);
4896 tcg_temp_free_i32(tmp32_1);
4897 tcg_temp_free_i32(tmp32_2);
4898 break;
4899 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4900 insn = ld_code4(env, s->pc);
4901 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4902 tmp = get_address(s, 0, b2, d2);
4903 tmp32_1 = load_reg32(r1);
4904 tmp32_2 = tcg_const_i32(r3);
4905 potential_page_fault(s);
4906 gen_helper_stcm(cpu_env, tmp32_1, tmp32_2, tmp);
4907 tcg_temp_free_i64(tmp);
4908 tcg_temp_free_i32(tmp32_1);
4909 tcg_temp_free_i32(tmp32_2);
4910 break;
4911 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4912 insn = ld_code4(env, s->pc);
4913 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4914 if (r3 == 15) {
4915 /* effectively a 32-bit load */
4916 tmp = get_address(s, 0, b2, d2);
4917 tmp32_1 = tcg_temp_new_i32();
4918 tmp32_2 = tcg_const_i32(r3);
4919 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4920 store_reg32_i64(r1, tmp);
4921 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4922 set_cc_icm(s, tmp32_2, tmp32_1);
4923 tcg_temp_free_i64(tmp);
4924 tcg_temp_free_i32(tmp32_1);
4925 tcg_temp_free_i32(tmp32_2);
4926 } else if (r3) {
4927 uint32_t mask = 0x00ffffffUL;
4928 uint32_t shift = 24;
4929 int m3 = r3;
4930 tmp = get_address(s, 0, b2, d2);
4931 tmp2 = tcg_temp_new_i64();
4932 tmp32_1 = load_reg32(r1);
4933 tmp32_2 = tcg_temp_new_i32();
4934 tmp32_3 = tcg_const_i32(r3);
4935 tmp32_4 = tcg_const_i32(0);
4936 while (m3) {
4937 if (m3 & 8) {
4938 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4939 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4940 if (shift) {
4941 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4943 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4944 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4945 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4946 tcg_gen_addi_i64(tmp, tmp, 1);
4948 m3 = (m3 << 1) & 0xf;
4949 mask = (mask >> 8) | 0xff000000UL;
4950 shift -= 8;
4952 store_reg32(r1, tmp32_1);
4953 set_cc_icm(s, tmp32_3, tmp32_4);
4954 tcg_temp_free_i64(tmp);
4955 tcg_temp_free_i64(tmp2);
4956 tcg_temp_free_i32(tmp32_1);
4957 tcg_temp_free_i32(tmp32_2);
4958 tcg_temp_free_i32(tmp32_3);
4959 tcg_temp_free_i32(tmp32_4);
4960 } else {
4961 /* i.e. env->cc = 0 */
4962 gen_op_movi_cc(s, 0);
4964 break;
4965 case 0xc0:
4966 case 0xc2:
4967 insn = ld_code6(env, s->pc);
4968 r1 = (insn >> 36) & 0xf;
4969 op = (insn >> 32) & 0xf;
4970 i2 = (int)insn;
4971 switch (opc) {
4972 case 0xc0:
4973 disas_c0(env, s, op, r1, i2);
4974 break;
4975 case 0xc2:
4976 disas_c2(env, s, op, r1, i2);
4977 break;
4978 default:
4979 tcg_abort();
4981 break;
4982 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4983 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4984 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4985 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4986 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4987 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4988 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4989 insn = ld_code6(env, s->pc);
4990 vl = tcg_const_i32((insn >> 32) & 0xff);
4991 b1 = (insn >> 28) & 0xf;
4992 b2 = (insn >> 12) & 0xf;
4993 d1 = (insn >> 16) & 0xfff;
4994 d2 = insn & 0xfff;
4995 tmp = get_address(s, 0, b1, d1);
4996 tmp2 = get_address(s, 0, b2, d2);
4997 switch (opc) {
4998 case 0xd2:
4999 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
5000 break;
5001 case 0xd4:
5002 potential_page_fault(s);
5003 gen_helper_nc(cc_op, cpu_env, vl, tmp, tmp2);
5004 set_cc_static(s);
5005 break;
5006 case 0xd5:
5007 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
5008 break;
5009 case 0xd6:
5010 potential_page_fault(s);
5011 gen_helper_oc(cc_op, cpu_env, vl, tmp, tmp2);
5012 set_cc_static(s);
5013 break;
5014 case 0xd7:
5015 potential_page_fault(s);
5016 gen_helper_xc(cc_op, cpu_env, vl, tmp, tmp2);
5017 set_cc_static(s);
5018 break;
5019 case 0xdc:
5020 potential_page_fault(s);
5021 gen_helper_tr(cpu_env, vl, tmp, tmp2);
5022 set_cc_static(s);
5023 break;
5024 case 0xf3:
5025 potential_page_fault(s);
5026 gen_helper_unpk(cpu_env, vl, tmp, tmp2);
5027 break;
5028 default:
5029 tcg_abort();
5031 tcg_temp_free_i64(tmp);
5032 tcg_temp_free_i64(tmp2);
5033 break;
5034 #ifndef CONFIG_USER_ONLY
5035 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5036 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5037 check_privileged(env, s, ilc);
5038 potential_page_fault(s);
5039 insn = ld_code6(env, s->pc);
5040 r1 = (insn >> 36) & 0xf;
5041 r3 = (insn >> 32) & 0xf;
5042 b1 = (insn >> 28) & 0xf;
5043 d1 = (insn >> 16) & 0xfff;
5044 b2 = (insn >> 12) & 0xf;
5045 d2 = insn & 0xfff;
5046 tmp = load_reg(r1);
5047 /* XXX key in r3 */
5048 tmp2 = get_address(s, 0, b1, d1);
5049 tmp3 = get_address(s, 0, b2, d2);
5050 if (opc == 0xda) {
5051 gen_helper_mvcp(cc_op, cpu_env, tmp, tmp2, tmp3);
5052 } else {
5053 gen_helper_mvcs(cc_op, cpu_env, tmp, tmp2, tmp3);
5055 set_cc_static(s);
5056 tcg_temp_free_i64(tmp);
5057 tcg_temp_free_i64(tmp2);
5058 tcg_temp_free_i64(tmp3);
5059 break;
5060 #endif
5061 case 0xe3:
5062 insn = ld_code6(env, s->pc);
5063 debug_insn(insn);
5064 op = insn & 0xff;
5065 r1 = (insn >> 36) & 0xf;
5066 x2 = (insn >> 32) & 0xf;
5067 b2 = (insn >> 28) & 0xf;
5068 d2 = ((int)((((insn >> 16) & 0xfff)
5069 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5070 disas_e3(env, s, op, r1, x2, b2, d2 );
5071 break;
5072 #ifndef CONFIG_USER_ONLY
5073 case 0xe5:
5074 /* Test Protection */
5075 check_privileged(env, s, ilc);
5076 insn = ld_code6(env, s->pc);
5077 debug_insn(insn);
5078 disas_e5(env, s, insn);
5079 break;
5080 #endif
5081 case 0xeb:
5082 insn = ld_code6(env, s->pc);
5083 debug_insn(insn);
5084 op = insn & 0xff;
5085 r1 = (insn >> 36) & 0xf;
5086 r3 = (insn >> 32) & 0xf;
5087 b2 = (insn >> 28) & 0xf;
5088 d2 = ((int)((((insn >> 16) & 0xfff)
5089 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5090 disas_eb(env, s, op, r1, r3, b2, d2);
5091 break;
5092 case 0xed:
5093 insn = ld_code6(env, s->pc);
5094 debug_insn(insn);
5095 op = insn & 0xff;
5096 r1 = (insn >> 36) & 0xf;
5097 x2 = (insn >> 32) & 0xf;
5098 b2 = (insn >> 28) & 0xf;
5099 d2 = (short)((insn >> 16) & 0xfff);
5100 r1b = (insn >> 12) & 0xf;
5101 disas_ed(env, s, op, r1, x2, b2, d2, r1b);
5102 break;
5103 default:
5104 qemu_log_mask(LOG_UNIMP, "unimplemented opcode 0x%x\n", opc);
5105 gen_illegal_opcode(env, s, ilc);
5106 break;
5109 /* Instruction length is encoded in the opcode */
5110 s->pc += (ilc * 2);
5113 static inline void gen_intermediate_code_internal(CPUS390XState *env,
5114 TranslationBlock *tb,
5115 int search_pc)
/* Core translation loop: disassemble guest instructions starting at tb->pc
 * into TCG ops until the TB must end (jump, TCG op buffer nearly full, page
 * boundary crossed, icount budget reached, or single-stepping).
 *
 * search_pc != 0 selects the "restore state" mode used by
 * gen_intermediate_code_pc(): for every generated op slot we record the
 * guest PC, the cc_op, an instruction-start flag and the icount, so a host
 * PC can later be mapped back to guest CPU state (see restore_state_to_opc).
 */
5117 DisasContext dc;
5118 target_ulong pc_start;
5119 uint64_t next_page_start;
5120 uint16_t *gen_opc_end;
/* j = current op index; lj = last op index annotated so far (-1 = none). */
5121 int j, lj = -1;
5122 int num_insns, max_insns;
5123 CPUBreakpoint *bp;
5125 pc_start = tb->pc;
5127 /* 31-bit mode */
/* In ESA (31-bit) mode the top address bit is not part of the PC. */
5128 if (!(tb->flags & FLAG_MASK_64)) {
5129 pc_start &= 0x7fffffff;
5132 dc.pc = pc_start;
5133 dc.is_jmp = DISAS_NEXT;
5134 dc.tb = tb;
/* cc_op starts dynamic: the incoming condition code lives in env, not in a
 * tracked TCG computation. */
5135 dc.cc_op = CC_OP_DYNAMIC;
5137 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* A TB never spans a guest page boundary (page may be remapped/protected). */
5139 next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
5141 num_insns = 0;
5142 max_insns = tb->cflags & CF_COUNT_MASK;
5143 if (max_insns == 0) {
/* No explicit budget: effectively unlimited (bounded by the op buffer). */
5144 max_insns = CF_COUNT_MASK;
5147 gen_icount_start();
5149 do {
/* Emit a debug exception when a breakpoint sits on the next insn. */
5150 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5151 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5152 if (bp->pc == dc.pc) {
5153 gen_debug(&dc);
5154 break;
/* Record per-op guest state for host-PC -> guest-PC restoration. */
5158 if (search_pc) {
5159 j = gen_opc_ptr - gen_opc_buf;
5160 if (lj < j) {
5161 lj++;
/* Ops emitted since the last insn start are not insn boundaries. */
5162 while (lj < j) {
5163 gen_opc_instr_start[lj++] = 0;
5166 gen_opc_pc[lj] = dc.pc;
5167 gen_opc_cc_op[lj] = dc.cc_op;
5168 gen_opc_instr_start[lj] = 1;
5169 gen_opc_icount[lj] = num_insns;
/* If the last insn of an icount TB may do I/O, bracket it with io_start. */
5171 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
5172 gen_io_start();
5175 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
5176 tcg_gen_debug_insn_start(dc.pc);
/* Translate exactly one guest instruction; advances dc.pc and may set
 * dc.is_jmp / dc.cc_op. */
5179 disas_s390_insn(env, &dc);
5181 num_insns++;
5182 if (env->singlestep_enabled) {
5183 gen_debug(&dc);
5185 } while (!dc.is_jmp && gen_opc_ptr < gen_opc_end && dc.pc < next_page_start
5186 && num_insns < max_insns && !env->singlestep_enabled
5187 && !singlestep);
/* TB fell off the end without a branch: store the next PC explicitly. */
5189 if (!dc.is_jmp) {
5190 update_psw_addr(&dc);
5193 if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
5194 gen_op_calc_cc(&dc);
5195 } else {
5196 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5197 is in env */
5198 gen_op_set_cc_op(&dc);
5201 if (tb->cflags & CF_LAST_IO) {
5202 gen_io_end();
5204 /* Generate the return instruction */
5205 if (dc.is_jmp != DISAS_TB_JUMP) {
5206 tcg_gen_exit_tb(0);
5208 gen_icount_end(tb, num_insns);
5209 *gen_opc_ptr = INDEX_op_end;
5210 if (search_pc) {
/* Pad the annotation arrays out to the final op count. */
5211 j = gen_opc_ptr - gen_opc_buf;
5212 lj++;
5213 while (lj <= j) {
5214 gen_opc_instr_start[lj++] = 0;
5216 } else {
/* Only a normal (non-search) translation defines the TB's size/icount. */
5217 tb->size = dc.pc - pc_start;
5218 tb->icount = num_insns;
5220 #if defined(S390X_DEBUG_DISAS)
5221 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5222 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5223 log_target_disas(pc_start, dc.pc - pc_start, 1);
5224 qemu_log("\n");
5226 #endif
5229 void gen_intermediate_code (CPUS390XState *env, struct TranslationBlock *tb)
5231 gen_intermediate_code_internal(env, tb, 0);
5234 void gen_intermediate_code_pc (CPUS390XState *env, struct TranslationBlock *tb)
5236 gen_intermediate_code_internal(env, tb, 1);
5239 void restore_state_to_opc(CPUS390XState *env, TranslationBlock *tb, int pc_pos)
5241 int cc_op;
5242 env->psw.addr = gen_opc_pc[pc_pos];
5243 cc_op = gen_opc_cc_op[pc_pos];
5244 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
5245 env->cc_op = cc_op;