target-s390x: Fix duplicate call of tcg_temp_new_i64
[qemu-kvm.git] / target-s390x / translate.c
blob 141a72f0e8a98dd78a2e78bca46499457402bb1c
1 /*
2 * S/390 translation
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
26 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
27 /* #define DEBUG_INLINE_BRANCHES */
28 #define S390X_DEBUG_DISAS
29 /* #define S390X_DEBUG_DISAS_VERBOSE */
31 #ifdef S390X_DEBUG_DISAS_VERBOSE
32 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
33 #else
34 # define LOG_DISAS(...) do { } while (0)
35 #endif
37 #include "cpu.h"
38 #include "exec-all.h"
39 #include "disas.h"
40 #include "tcg-op.h"
41 #include "qemu-log.h"
43 /* global register indexes */
44 static TCGv_ptr cpu_env;
46 #include "gen-icount.h"
47 #include "helpers.h"
48 #define GEN_HELPER 1
49 #include "helpers.h"
51 typedef struct DisasContext DisasContext;
52 struct DisasContext {
53 uint64_t pc;
54 int is_jmp;
55 enum cc_op cc_op;
56 struct TranslationBlock *tb;
59 #define DISAS_EXCP 4
61 static void gen_op_calc_cc(DisasContext *s);
63 #ifdef DEBUG_INLINE_BRANCHES
64 static uint64_t inline_branch_hit[CC_OP_MAX];
65 static uint64_t inline_branch_miss[CC_OP_MAX];
66 #endif
68 static inline void debug_insn(uint64_t insn)
70 LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
73 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
75 if (!(s->tb->flags & FLAG_MASK_64)) {
76 if (s->tb->flags & FLAG_MASK_32) {
77 return pc | 0x80000000;
80 return pc;
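/* Link information derived from a PC value (as stored in R1 by
   branch-and-link style instructions): in 64-bit mode the PC is used
   unchanged, in 31-bit mode bit 31 is set to record the addressing mode,
   and the remaining (24-bit) case simply falls through to the raw PC. */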
83 void cpu_dump_state(CPUState *env, FILE *f, fprintf_function cpu_fprintf,
84 int flags)
86 int i;
88 for (i = 0; i < 16; i++) {
89 cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
90 if ((i % 4) == 3) {
91 cpu_fprintf(f, "\n");
92 } else {
93 cpu_fprintf(f, " ");
97 for (i = 0; i < 16; i++) {
98 cpu_fprintf(f, "F%02d=%016" PRIx64, i, *(uint64_t *)&env->fregs[i]);
99 if ((i % 4) == 3) {
100 cpu_fprintf(f, "\n");
101 } else {
102 cpu_fprintf(f, " ");
106 cpu_fprintf(f, "\n");
108 #ifndef CONFIG_USER_ONLY
109 for (i = 0; i < 16; i++) {
110 cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
111 if ((i % 4) == 3) {
112 cpu_fprintf(f, "\n");
113 } else {
114 cpu_fprintf(f, " ");
117 #endif
119 cpu_fprintf(f, "\n");
121 if (env->cc_op > 3) {
122 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
123 env->psw.mask, env->psw.addr, cc_name(env->cc_op));
124 } else {
125 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
126 env->psw.mask, env->psw.addr, env->cc_op);
129 #ifdef DEBUG_INLINE_BRANCHES
130 for (i = 0; i < CC_OP_MAX; i++) {
131 cpu_fprintf(f, " %15s = %10ld\t%10ld\n", cc_name(i),
132 inline_branch_miss[i], inline_branch_hit[i]);
134 #endif
137 static TCGv_i64 psw_addr;
138 static TCGv_i64 psw_mask;
140 static TCGv_i32 cc_op;
141 static TCGv_i64 cc_src;
142 static TCGv_i64 cc_dst;
143 static TCGv_i64 cc_vr;
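/* State for the lazy condition-code scheme: instead of computing the 2-bit
   cc after every instruction, the translator stashes the operands in
   cc_src/cc_dst/cc_vr, remembers the pending operation in
   DisasContext.cc_op, and only folds them into the cc_op global (see
   gen_op_calc_cc) when something actually consumes the value. */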
145 static char cpu_reg_names[10*3 + 6*4];
146 static TCGv_i64 regs[16];
148 static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
150 void s390x_translate_init(void)
152 int i;
153 size_t cpu_reg_names_size = sizeof(cpu_reg_names);
154 char *p;
156 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
157 psw_addr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, psw.addr),
158 "psw_addr");
159 psw_mask = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, psw.mask),
160 "psw_mask");
162 cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
163 "cc_op");
164 cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_src),
165 "cc_src");
166 cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_dst),
167 "cc_dst");
168 cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_vr),
169 "cc_vr");
171 p = cpu_reg_names;
172 for (i = 0; i < 16; i++) {
173 snprintf(p, cpu_reg_names_size, "r%d", i);
174 regs[i] = tcg_global_mem_new(TCG_AREG0,
175 offsetof(CPUState, regs[i]), p);
176 p += (i < 10) ? 3 : 4;
177 cpu_reg_names_size -= (i < 10) ? 3 : 4;
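/* All of the above are TCG globals backed by fields of CPUState, so
   generated code reads and writes the CPU state directly.  The register
   name buffer is sized as 10*3 + 6*4 bytes because "r0".."r9" take three
   bytes each (including the NUL) and "r10".."r15" take four, which is why
   the name pointer advances by 3 or 4 per iteration. */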
181 static inline TCGv_i64 load_reg(int reg)
183 TCGv_i64 r = tcg_temp_new_i64();
184 tcg_gen_mov_i64(r, regs[reg]);
185 return r;
188 static inline TCGv_i64 load_freg(int reg)
190 TCGv_i64 r = tcg_temp_new_i64();
191 tcg_gen_ld_i64(r, cpu_env, offsetof(CPUState, fregs[reg].d));
192 return r;
195 static inline TCGv_i32 load_freg32(int reg)
197 TCGv_i32 r = tcg_temp_new_i32();
198 tcg_gen_ld_i32(r, cpu_env, offsetof(CPUState, fregs[reg].l.upper));
199 return r;
202 static inline TCGv_i32 load_reg32(int reg)
204 TCGv_i32 r = tcg_temp_new_i32();
205 tcg_gen_trunc_i64_i32(r, regs[reg]);
206 return r;
209 static inline TCGv_i64 load_reg32_i64(int reg)
211 TCGv_i64 r = tcg_temp_new_i64();
212 tcg_gen_ext32s_i64(r, regs[reg]);
213 return r;
216 static inline void store_reg(int reg, TCGv_i64 v)
218 tcg_gen_mov_i64(regs[reg], v);
221 static inline void store_freg(int reg, TCGv_i64 v)
223 tcg_gen_st_i64(v, cpu_env, offsetof(CPUState, fregs[reg].d));
226 static inline void store_reg32(int reg, TCGv_i32 v)
228 #if HOST_LONG_BITS == 32
229 tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
230 #else
231 TCGv_i64 tmp = tcg_temp_new_i64();
232 tcg_gen_extu_i32_i64(tmp, v);
233 /* 32 bit register writes keep the upper half */
234 tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 32);
235 tcg_temp_free_i64(tmp);
236 #endif
239 static inline void store_reg32_i64(int reg, TCGv_i64 v)
241 /* 32 bit register writes keep the upper half */
242 #if HOST_LONG_BITS == 32
243 tcg_gen_mov_i32(TCGV_LOW(regs[reg]), TCGV_LOW(v));
244 #else
245 tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
246 #endif
249 static inline void store_reg16(int reg, TCGv_i32 v)
251 TCGv_i64 tmp = tcg_temp_new_i64();
252 tcg_gen_extu_i32_i64(tmp, v);
253 /* 16 bit register writes keep the upper bytes */
254 tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 16);
255 tcg_temp_free_i64(tmp);
258 static inline void store_reg8(int reg, TCGv_i64 v)
260 /* 8 bit register writes keep the upper bytes */
261 tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
264 static inline void store_freg32(int reg, TCGv_i32 v)
266 tcg_gen_st_i32(v, cpu_env, offsetof(CPUState, fregs[reg].l.upper));
269 static inline void update_psw_addr(DisasContext *s)
271 /* psw.addr */
272 tcg_gen_movi_i64(psw_addr, s->pc);
275 static inline void potential_page_fault(DisasContext *s)
277 #ifndef CONFIG_USER_ONLY
278 update_psw_addr(s);
279 gen_op_calc_cc(s);
280 #endif
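/* Helpers that may raise a page fault need env->psw.addr and the cc to be
   up to date so the exception path sees a consistent CPU state; this
   write-back is only required (and only compiled in) for system
   emulation. */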
283 static inline uint64_t ld_code2(uint64_t pc)
285 return (uint64_t)lduw_code(pc);
288 static inline uint64_t ld_code4(uint64_t pc)
290 return (uint64_t)ldl_code(pc);
293 static inline uint64_t ld_code6(uint64_t pc)
295 uint64_t opc;
296 opc = (uint64_t)lduw_code(pc) << 32;
297 opc |= (uint64_t)(uint32_t)ldl_code(pc+2);
298 return opc;
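/* 6-byte instructions are fetched as one 16-bit plus one 32-bit load: the
   leading halfword lands in bits 47..32 and the remaining four bytes in
   bits 31..0, so the 48-bit opcode sits in the low 48 bits of the result. */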
301 static inline int get_mem_index(DisasContext *s)
303 switch (s->tb->flags & FLAG_MASK_ASC) {
304 case PSW_ASC_PRIMARY >> 32:
305 return 0;
306 case PSW_ASC_SECONDARY >> 32:
307 return 1;
308 case PSW_ASC_HOME >> 32:
309 return 2;
310 default:
311 tcg_abort();
312 break;
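/* The MMU index for qemu_ld/st ops is derived from the PSW address-space
   control bits copied into tb->flags: primary space maps to index 0,
   secondary to 1 and home space to 2.  Any other ASC value is unexpected
   at translation time, hence the tcg_abort(). */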
316 static inline void gen_debug(DisasContext *s)
318 TCGv_i32 tmp = tcg_const_i32(EXCP_DEBUG);
319 update_psw_addr(s);
320 gen_op_calc_cc(s);
321 gen_helper_exception(tmp);
322 tcg_temp_free_i32(tmp);
323 s->is_jmp = DISAS_EXCP;
326 #ifdef CONFIG_USER_ONLY
328 static void gen_illegal_opcode(DisasContext *s, int ilc)
330 TCGv_i32 tmp = tcg_const_i32(EXCP_SPEC);
331 update_psw_addr(s);
332 gen_op_calc_cc(s);
333 gen_helper_exception(tmp);
334 tcg_temp_free_i32(tmp);
335 s->is_jmp = DISAS_EXCP;
338 #else /* CONFIG_USER_ONLY */
340 static void debug_print_inst(DisasContext *s, int ilc)
342 #ifdef DEBUG_ILLEGAL_INSTRUCTIONS
343 uint64_t inst = 0;
345 switch (ilc & 3) {
346 case 1:
347 inst = ld_code2(s->pc);
348 break;
349 case 2:
350 inst = ld_code4(s->pc);
351 break;
352 case 3:
353 inst = ld_code6(s->pc);
354 break;
357 fprintf(stderr, "Illegal instruction [%d at %016" PRIx64 "]: 0x%016"
358 PRIx64 "\n", ilc, s->pc, inst);
359 #endif
362 static void gen_program_exception(DisasContext *s, int ilc, int code)
364 TCGv_i32 tmp;
366 debug_print_inst(s, ilc);
368 /* remember what pgm exception this was */
369 tmp = tcg_const_i32(code);
370 tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, int_pgm_code));
371 tcg_temp_free_i32(tmp);
373 tmp = tcg_const_i32(ilc);
374 tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, int_pgm_ilc));
375 tcg_temp_free_i32(tmp);
377 /* advance past instruction */
378 s->pc += (ilc * 2);
379 update_psw_addr(s);
381 /* save off cc */
382 gen_op_calc_cc(s);
384 /* trigger exception */
385 tmp = tcg_const_i32(EXCP_PGM);
386 gen_helper_exception(tmp);
387 tcg_temp_free_i32(tmp);
389 /* end TB here */
390 s->is_jmp = DISAS_EXCP;
394 static void gen_illegal_opcode(DisasContext *s, int ilc)
396 gen_program_exception(s, ilc, PGM_SPECIFICATION);
399 static void gen_privileged_exception(DisasContext *s, int ilc)
401 gen_program_exception(s, ilc, PGM_PRIVILEGED);
404 static void check_privileged(DisasContext *s, int ilc)
406 if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
407 gen_privileged_exception(s, ilc);
411 #endif /* CONFIG_USER_ONLY */
413 static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
415 TCGv_i64 tmp;
417 /* 31-bitify the immediate part; register contents are dealt with below */
418 if (!(s->tb->flags & FLAG_MASK_64)) {
419 d2 &= 0x7fffffffUL;
422 if (x2) {
423 if (d2) {
424 tmp = tcg_const_i64(d2);
425 tcg_gen_add_i64(tmp, tmp, regs[x2]);
426 } else {
427 tmp = load_reg(x2);
429 if (b2) {
430 tcg_gen_add_i64(tmp, tmp, regs[b2]);
432 } else if (b2) {
433 if (d2) {
434 tmp = tcg_const_i64(d2);
435 tcg_gen_add_i64(tmp, tmp, regs[b2]);
436 } else {
437 tmp = load_reg(b2);
439 } else {
440 tmp = tcg_const_i64(d2);
443 /* 31-bit mode mask if there are values loaded from registers */
444 if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
445 tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
448 return tmp;
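/* Effective address = d2 + contents of x2 (if non-zero) + contents of b2
   (if non-zero), truncated to 31 bits when not running in 64-bit mode.
   Hypothetical example: with d2 = 0x100, b2 = 5 and x2 = 0 the returned
   temporary holds regs[5] + 0x100 (masked to 31 bits outside 64-bit
   mode).  The caller owns the temporary and must free it. */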
451 static void gen_op_movi_cc(DisasContext *s, uint32_t val)
453 s->cc_op = CC_OP_CONST0 + val;
456 static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
458 tcg_gen_discard_i64(cc_src);
459 tcg_gen_mov_i64(cc_dst, dst);
460 tcg_gen_discard_i64(cc_vr);
461 s->cc_op = op;
464 static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
466 tcg_gen_discard_i64(cc_src);
467 tcg_gen_extu_i32_i64(cc_dst, dst);
468 tcg_gen_discard_i64(cc_vr);
469 s->cc_op = op;
472 static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
473 TCGv_i64 dst)
475 tcg_gen_mov_i64(cc_src, src);
476 tcg_gen_mov_i64(cc_dst, dst);
477 tcg_gen_discard_i64(cc_vr);
478 s->cc_op = op;
481 static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
482 TCGv_i32 dst)
484 tcg_gen_extu_i32_i64(cc_src, src);
485 tcg_gen_extu_i32_i64(cc_dst, dst);
486 tcg_gen_discard_i64(cc_vr);
487 s->cc_op = op;
490 static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
491 TCGv_i64 dst, TCGv_i64 vr)
493 tcg_gen_mov_i64(cc_src, src);
494 tcg_gen_mov_i64(cc_dst, dst);
495 tcg_gen_mov_i64(cc_vr, vr);
496 s->cc_op = op;
499 static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
500 TCGv_i32 dst, TCGv_i32 vr)
502 tcg_gen_extu_i32_i64(cc_src, src);
503 tcg_gen_extu_i32_i64(cc_dst, dst);
504 tcg_gen_extu_i32_i64(cc_vr, vr);
505 s->cc_op = op;
508 static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
510 gen_op_update1_cc_i32(s, CC_OP_NZ, val);
513 static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
515 gen_op_update1_cc_i64(s, CC_OP_NZ, val);
518 static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
519 enum cc_op cond)
521 gen_op_update2_cc_i32(s, cond, v1, v2);
524 static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
525 enum cc_op cond)
527 gen_op_update2_cc_i64(s, cond, v1, v2);
530 static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
532 cmp_32(s, v1, v2, CC_OP_LTGT_32);
535 static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
537 cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
540 static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
542 /* XXX optimize for the constant? put it in s? */
543 TCGv_i32 tmp = tcg_const_i32(v2);
544 cmp_32(s, v1, tmp, CC_OP_LTGT_32);
545 tcg_temp_free_i32(tmp);
548 static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
550 TCGv_i32 tmp = tcg_const_i32(v2);
551 cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
552 tcg_temp_free_i32(tmp);
555 static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
557 cmp_64(s, v1, v2, CC_OP_LTGT_64);
560 static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
562 cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
565 static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
567 TCGv_i64 tmp = tcg_const_i64(v2);
568 cmp_s64(s, v1, tmp);
569 tcg_temp_free_i64(tmp);
572 static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
574 TCGv_i64 tmp = tcg_const_i64(v2);
575 cmp_u64(s, v1, tmp);
576 tcg_temp_free_i64(tmp);
579 static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
581 gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
584 static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
586 gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
589 static void set_cc_add64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
591 gen_op_update3_cc_i64(s, CC_OP_ADD_64, v1, v2, vr);
594 static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
595 TCGv_i64 vr)
597 gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
600 static void set_cc_sub64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
602 gen_op_update3_cc_i64(s, CC_OP_SUB_64, v1, v2, vr);
605 static void set_cc_subu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
606 TCGv_i64 vr)
608 gen_op_update3_cc_i64(s, CC_OP_SUBU_64, v1, v2, vr);
611 static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
613 gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
616 static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
618 gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
621 static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
623 gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
626 static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
627 TCGv_i32 vr)
629 gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
632 static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
634 gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
637 static void set_cc_subu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
638 TCGv_i32 vr)
640 gen_op_update3_cc_i32(s, CC_OP_SUBU_32, v1, v2, vr);
643 static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
645 gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
648 static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
650 gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
653 static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
655 gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
658 static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
660 gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
663 static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
665 gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
668 static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
670 tcg_gen_extu_i32_i64(cc_src, v1);
671 tcg_gen_mov_i64(cc_dst, v2);
672 tcg_gen_discard_i64(cc_vr);
673 s->cc_op = CC_OP_LTGT_F32;
676 static void set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
678 gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
681 static inline void set_cc_nz_f64(DisasContext *s, TCGv_i64 v1)
683 gen_op_update1_cc_i64(s, CC_OP_NZ_F64, v1);
686 /* CC value is in env->cc_op */
687 static inline void set_cc_static(DisasContext *s)
689 tcg_gen_discard_i64(cc_src);
690 tcg_gen_discard_i64(cc_dst);
691 tcg_gen_discard_i64(cc_vr);
692 s->cc_op = CC_OP_STATIC;
695 static inline void gen_op_set_cc_op(DisasContext *s)
697 if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
698 tcg_gen_movi_i32(cc_op, s->cc_op);
702 static inline void gen_update_cc_op(DisasContext *s)
704 gen_op_set_cc_op(s);
707 /* calculates cc into cc_op */
708 static void gen_op_calc_cc(DisasContext *s)
710 TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
711 TCGv_i64 dummy = tcg_const_i64(0);
713 switch (s->cc_op) {
714 case CC_OP_CONST0:
715 case CC_OP_CONST1:
716 case CC_OP_CONST2:
717 case CC_OP_CONST3:
718 /* s->cc_op is the cc value */
719 tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
720 break;
721 case CC_OP_STATIC:
722 /* env->cc_op already is the cc value */
723 break;
724 case CC_OP_NZ:
725 case CC_OP_ABS_64:
726 case CC_OP_NABS_64:
727 case CC_OP_ABS_32:
728 case CC_OP_NABS_32:
729 case CC_OP_LTGT0_32:
730 case CC_OP_LTGT0_64:
731 case CC_OP_COMP_32:
732 case CC_OP_COMP_64:
733 case CC_OP_NZ_F32:
734 case CC_OP_NZ_F64:
735 /* 1 argument */
736 gen_helper_calc_cc(cc_op, local_cc_op, dummy, cc_dst, dummy);
737 break;
738 case CC_OP_ICM:
739 case CC_OP_LTGT_32:
740 case CC_OP_LTGT_64:
741 case CC_OP_LTUGTU_32:
742 case CC_OP_LTUGTU_64:
743 case CC_OP_TM_32:
744 case CC_OP_TM_64:
745 case CC_OP_LTGT_F32:
746 case CC_OP_LTGT_F64:
747 case CC_OP_SLAG:
748 /* 2 arguments */
749 gen_helper_calc_cc(cc_op, local_cc_op, cc_src, cc_dst, dummy);
750 break;
751 case CC_OP_ADD_64:
752 case CC_OP_ADDU_64:
753 case CC_OP_SUB_64:
754 case CC_OP_SUBU_64:
755 case CC_OP_ADD_32:
756 case CC_OP_ADDU_32:
757 case CC_OP_SUB_32:
758 case CC_OP_SUBU_32:
759 /* 3 arguments */
760 gen_helper_calc_cc(cc_op, local_cc_op, cc_src, cc_dst, cc_vr);
761 break;
762 case CC_OP_DYNAMIC:
763 /* unknown operation - assume 3 arguments and cc_op in env */
764 gen_helper_calc_cc(cc_op, cc_op, cc_src, cc_dst, cc_vr);
765 break;
766 default:
767 tcg_abort();
770 tcg_temp_free_i32(local_cc_op);
772 /* We now have cc in cc_op as constant */
773 set_cc_static(s);
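/* Example of the lazy scheme at work: after an AG the state is
   CC_OP_ADD_64 with the two operands and the result parked in
   cc_src/cc_dst/cc_vr.  A later consumer calls gen_op_calc_cc(), which
   emits a single gen_helper_calc_cc() call folding them into a 0..3 value
   in the cc_op global, and the state collapses to CC_OP_STATIC. */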
776 static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
778 debug_insn(insn);
780 *r1 = (insn >> 4) & 0xf;
781 *r2 = insn & 0xf;
784 static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
785 int *x2, int *b2, int *d2)
787 debug_insn(insn);
789 *r1 = (insn >> 20) & 0xf;
790 *x2 = (insn >> 16) & 0xf;
791 *b2 = (insn >> 12) & 0xf;
792 *d2 = insn & 0xfff;
794 return get_address(s, *x2, *b2, *d2);
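/* RX/RXY operand fields sit in the low 24 bits of the instruction word:
   bits 23..20 = R1, 19..16 = X2, 15..12 = B2, 11..0 = D2.  Hypothetical
   example: low 24 bits of 0x123456 decode to r1 = 1, x2 = 2, b2 = 3 and
   d2 = 0x456; the returned temporary already holds the computed effective
   address. */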
797 static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
798 int *b2, int *d2)
800 debug_insn(insn);
802 *r1 = (insn >> 20) & 0xf;
803 /* aka m3 */
804 *r3 = (insn >> 16) & 0xf;
805 *b2 = (insn >> 12) & 0xf;
806 *d2 = insn & 0xfff;
809 static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
810 int *b1, int *d1)
812 debug_insn(insn);
814 *i2 = (insn >> 16) & 0xff;
815 *b1 = (insn >> 12) & 0xf;
816 *d1 = insn & 0xfff;
818 return get_address(s, 0, *b1, *d1);
821 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
823 TranslationBlock *tb;
825 gen_update_cc_op(s);
827 tb = s->tb;
828 /* NOTE: we handle the case where the TB spans two pages here */
829 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
830 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
831 /* jump to same page: we can use a direct jump */
832 tcg_gen_goto_tb(tb_num);
833 tcg_gen_movi_i64(psw_addr, pc);
834 tcg_gen_exit_tb((long)tb + tb_num);
835 } else {
836 /* jump to another page: currently not optimized */
837 tcg_gen_movi_i64(psw_addr, pc);
838 tcg_gen_exit_tb(0);
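/* Direct block chaining: if the target lies on the same page as the TB
   start (or the page of the current instruction), a patchable goto_tb is
   emitted and exit_tb() is passed the TB pointer plus the slot number so
   the two TBs can later be linked directly.  Cross-page targets always
   return to the main loop via exit_tb(0). */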
842 static inline void account_noninline_branch(DisasContext *s, int cc_op)
844 #ifdef DEBUG_INLINE_BRANCHES
845 inline_branch_miss[cc_op]++;
846 #endif
849 static inline void account_inline_branch(DisasContext *s)
851 #ifdef DEBUG_INLINE_BRANCHES
852 inline_branch_hit[s->cc_op]++;
853 #endif
856 static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
858 TCGv_i32 tmp, tmp2, r;
859 TCGv_i64 tmp64;
860 int old_cc_op;
862 switch (s->cc_op) {
863 case CC_OP_LTGT0_32:
864 tmp = tcg_temp_new_i32();
865 tcg_gen_trunc_i64_i32(tmp, cc_dst);
866 switch (mask) {
867 case 0x8 | 0x4: /* dst <= 0 */
868 tcg_gen_brcondi_i32(TCG_COND_GT, tmp, 0, skip);
869 break;
870 case 0x8 | 0x2: /* dst >= 0 */
871 tcg_gen_brcondi_i32(TCG_COND_LT, tmp, 0, skip);
872 break;
873 case 0x8: /* dst == 0 */
874 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
875 break;
876 case 0x7: /* dst != 0 */
877 case 0x6: /* dst != 0 */
878 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
879 break;
880 case 0x4: /* dst < 0 */
881 tcg_gen_brcondi_i32(TCG_COND_GE, tmp, 0, skip);
882 break;
883 case 0x2: /* dst > 0 */
884 tcg_gen_brcondi_i32(TCG_COND_LE, tmp, 0, skip);
885 break;
886 default:
887 tcg_temp_free_i32(tmp);
888 goto do_dynamic;
890 account_inline_branch(s);
891 tcg_temp_free_i32(tmp);
892 break;
893 case CC_OP_LTGT0_64:
894 switch (mask) {
895 case 0x8 | 0x4: /* dst <= 0 */
896 tcg_gen_brcondi_i64(TCG_COND_GT, cc_dst, 0, skip);
897 break;
898 case 0x8 | 0x2: /* dst >= 0 */
899 tcg_gen_brcondi_i64(TCG_COND_LT, cc_dst, 0, skip);
900 break;
901 case 0x8: /* dst == 0 */
902 tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
903 break;
904 case 0x7: /* dst != 0 */
905 case 0x6: /* dst != 0 */
906 tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
907 break;
908 case 0x4: /* dst < 0 */
909 tcg_gen_brcondi_i64(TCG_COND_GE, cc_dst, 0, skip);
910 break;
911 case 0x2: /* dst > 0 */
912 tcg_gen_brcondi_i64(TCG_COND_LE, cc_dst, 0, skip);
913 break;
914 default:
915 goto do_dynamic;
917 account_inline_branch(s);
918 break;
919 case CC_OP_LTGT_32:
920 tmp = tcg_temp_new_i32();
921 tmp2 = tcg_temp_new_i32();
922 tcg_gen_trunc_i64_i32(tmp, cc_src);
923 tcg_gen_trunc_i64_i32(tmp2, cc_dst);
924 switch (mask) {
925 case 0x8 | 0x4: /* src <= dst */
926 tcg_gen_brcond_i32(TCG_COND_GT, tmp, tmp2, skip);
927 break;
928 case 0x8 | 0x2: /* src >= dst */
929 tcg_gen_brcond_i32(TCG_COND_LT, tmp, tmp2, skip);
930 break;
931 case 0x8: /* src == dst */
932 tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
933 break;
934 case 0x7: /* src != dst */
935 case 0x6: /* src != dst */
936 tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
937 break;
938 case 0x4: /* src < dst */
939 tcg_gen_brcond_i32(TCG_COND_GE, tmp, tmp2, skip);
940 break;
941 case 0x2: /* src > dst */
942 tcg_gen_brcond_i32(TCG_COND_LE, tmp, tmp2, skip);
943 break;
944 default:
945 tcg_temp_free_i32(tmp);
946 tcg_temp_free_i32(tmp2);
947 goto do_dynamic;
949 account_inline_branch(s);
950 tcg_temp_free_i32(tmp);
951 tcg_temp_free_i32(tmp2);
952 break;
953 case CC_OP_LTGT_64:
954 switch (mask) {
955 case 0x8 | 0x4: /* src <= dst */
956 tcg_gen_brcond_i64(TCG_COND_GT, cc_src, cc_dst, skip);
957 break;
958 case 0x8 | 0x2: /* src >= dst */
959 tcg_gen_brcond_i64(TCG_COND_LT, cc_src, cc_dst, skip);
960 break;
961 case 0x8: /* src == dst */
962 tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
963 break;
964 case 0x7: /* src != dst */
965 case 0x6: /* src != dst */
966 tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
967 break;
968 case 0x4: /* src < dst */
969 tcg_gen_brcond_i64(TCG_COND_GE, cc_src, cc_dst, skip);
970 break;
971 case 0x2: /* src > dst */
972 tcg_gen_brcond_i64(TCG_COND_LE, cc_src, cc_dst, skip);
973 break;
974 default:
975 goto do_dynamic;
977 account_inline_branch(s);
978 break;
979 case CC_OP_LTUGTU_32:
980 tmp = tcg_temp_new_i32();
981 tmp2 = tcg_temp_new_i32();
982 tcg_gen_trunc_i64_i32(tmp, cc_src);
983 tcg_gen_trunc_i64_i32(tmp2, cc_dst);
984 switch (mask) {
985 case 0x8 | 0x4: /* src <= dst */
986 tcg_gen_brcond_i32(TCG_COND_GTU, tmp, tmp2, skip);
987 break;
988 case 0x8 | 0x2: /* src >= dst */
989 tcg_gen_brcond_i32(TCG_COND_LTU, tmp, tmp2, skip);
990 break;
991 case 0x8: /* src == dst */
992 tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
993 break;
994 case 0x7: /* src != dst */
995 case 0x6: /* src != dst */
996 tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
997 break;
998 case 0x4: /* src < dst */
999 tcg_gen_brcond_i32(TCG_COND_GEU, tmp, tmp2, skip);
1000 break;
1001 case 0x2: /* src > dst */
1002 tcg_gen_brcond_i32(TCG_COND_LEU, tmp, tmp2, skip);
1003 break;
1004 default:
1005 tcg_temp_free_i32(tmp);
1006 tcg_temp_free_i32(tmp2);
1007 goto do_dynamic;
1009 account_inline_branch(s);
1010 tcg_temp_free_i32(tmp);
1011 tcg_temp_free_i32(tmp2);
1012 break;
1013 case CC_OP_LTUGTU_64:
1014 switch (mask) {
1015 case 0x8 | 0x4: /* src <= dst */
1016 tcg_gen_brcond_i64(TCG_COND_GTU, cc_src, cc_dst, skip);
1017 break;
1018 case 0x8 | 0x2: /* src >= dst */
1019 tcg_gen_brcond_i64(TCG_COND_LTU, cc_src, cc_dst, skip);
1020 break;
1021 case 0x8: /* src == dst */
1022 tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
1023 break;
1024 case 0x7: /* src != dst */
1025 case 0x6: /* src != dst */
1026 tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
1027 break;
1028 case 0x4: /* src < dst */
1029 tcg_gen_brcond_i64(TCG_COND_GEU, cc_src, cc_dst, skip);
1030 break;
1031 case 0x2: /* src > dst */
1032 tcg_gen_brcond_i64(TCG_COND_LEU, cc_src, cc_dst, skip);
1033 break;
1034 default:
1035 goto do_dynamic;
1037 account_inline_branch(s);
1038 break;
1039 case CC_OP_NZ:
1040 switch (mask) {
1041 /* dst == 0 || dst != 0 */
1042 case 0x8 | 0x4:
1043 case 0x8 | 0x4 | 0x2:
1044 case 0x8 | 0x4 | 0x2 | 0x1:
1045 case 0x8 | 0x4 | 0x1:
1046 break;
1047 /* dst == 0 */
1048 case 0x8:
1049 case 0x8 | 0x2:
1050 case 0x8 | 0x2 | 0x1:
1051 case 0x8 | 0x1:
1052 tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
1053 break;
1054 /* dst != 0 */
1055 case 0x4:
1056 case 0x4 | 0x2:
1057 case 0x4 | 0x2 | 0x1:
1058 case 0x4 | 0x1:
1059 tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
1060 break;
1061 default:
1062 goto do_dynamic;
1064 account_inline_branch(s);
1065 break;
1066 case CC_OP_TM_32:
1067 tmp = tcg_temp_new_i32();
1068 tmp2 = tcg_temp_new_i32();
1070 tcg_gen_trunc_i64_i32(tmp, cc_src);
1071 tcg_gen_trunc_i64_i32(tmp2, cc_dst);
1072 tcg_gen_and_i32(tmp, tmp, tmp2);
1073 switch (mask) {
1074 case 0x8: /* val & mask == 0 */
1075 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
1076 break;
1077 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1078 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
1079 break;
1080 default:
1081 goto do_dynamic;
1083 tcg_temp_free_i32(tmp);
1084 account_inline_branch(s);
1085 break;
1086 case CC_OP_TM_64:
1087 tmp64 = tcg_temp_new_i64();
1089 tcg_gen_and_i64(tmp64, cc_src, cc_dst);
1090 switch (mask) {
1091 case 0x8: /* val & mask == 0 */
1092 tcg_gen_brcondi_i64(TCG_COND_NE, tmp64, 0, skip);
1093 break;
1094 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1095 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp64, 0, skip);
1096 break;
1097 default:
1098 goto do_dynamic;
1100 tcg_temp_free_i64(tmp64);
1101 account_inline_branch(s);
1102 break;
1103 case CC_OP_ICM:
1104 switch (mask) {
1105 case 0x8: /* val == 0 */
1106 tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
1107 break;
1108 case 0x4 | 0x2 | 0x1: /* val != 0 */
1109 case 0x4 | 0x2: /* val != 0 */
1110 tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
1111 break;
1112 default:
1113 goto do_dynamic;
1115 account_inline_branch(s);
1116 break;
1117 case CC_OP_STATIC:
1118 old_cc_op = s->cc_op;
1119 goto do_dynamic_nocccalc;
1120 case CC_OP_DYNAMIC:
1121 default:
1122 do_dynamic:
1123 old_cc_op = s->cc_op;
1124 /* calculate cc value */
1125 gen_op_calc_cc(s);
1127 do_dynamic_nocccalc:
1128 /* jump based on cc */
1129 account_noninline_branch(s, old_cc_op);
1131 switch (mask) {
1132 case 0x8 | 0x4 | 0x2 | 0x1:
1133 /* always true */
1134 break;
1135 case 0x8 | 0x4 | 0x2: /* cc != 3 */
1136 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 3, skip);
1137 break;
1138 case 0x8 | 0x4 | 0x1: /* cc != 2 */
1139 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 2, skip);
1140 break;
1141 case 0x8 | 0x2 | 0x1: /* cc != 1 */
1142 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 1, skip);
1143 break;
1144 case 0x8 | 0x2: /* cc == 0 || cc == 2 */
1145 tmp = tcg_temp_new_i32();
1146 tcg_gen_andi_i32(tmp, cc_op, 1);
1147 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
1148 tcg_temp_free_i32(tmp);
1149 break;
1150 case 0x8 | 0x4: /* cc < 2 */
1151 tcg_gen_brcondi_i32(TCG_COND_GEU, cc_op, 2, skip);
1152 break;
1153 case 0x8: /* cc == 0 */
1154 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 0, skip);
1155 break;
1156 case 0x4 | 0x2 | 0x1: /* cc != 0 */
1157 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 0, skip);
1158 break;
1159 case 0x4 | 0x1: /* cc == 1 || cc == 3 */
1160 tmp = tcg_temp_new_i32();
1161 tcg_gen_andi_i32(tmp, cc_op, 1);
1162 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
1163 tcg_temp_free_i32(tmp);
1164 break;
1165 case 0x4: /* cc == 1 */
1166 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 1, skip);
1167 break;
1168 case 0x2 | 0x1: /* cc > 1 */
1169 tcg_gen_brcondi_i32(TCG_COND_LEU, cc_op, 1, skip);
1170 break;
1171 case 0x2: /* cc == 2 */
1172 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 2, skip);
1173 break;
1174 case 0x1: /* cc == 3 */
1175 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 3, skip);
1176 break;
1177 default: /* cc is masked by something else */
1178 tmp = tcg_const_i32(3);
1179 /* 3 - cc */
1180 tcg_gen_sub_i32(tmp, tmp, cc_op);
1181 tmp2 = tcg_const_i32(1);
1182 /* 1 << (3 - cc) */
1183 tcg_gen_shl_i32(tmp2, tmp2, tmp);
1184 r = tcg_const_i32(mask);
1185 /* mask & (1 << (3 - cc)) */
1186 tcg_gen_and_i32(r, r, tmp2);
1187 tcg_temp_free_i32(tmp);
1188 tcg_temp_free_i32(tmp2);
1190 tcg_gen_brcondi_i32(TCG_COND_EQ, r, 0, skip);
1191 tcg_temp_free_i32(r);
1192 break;
1194 break;
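/* The 4-bit branch mask selects which cc values satisfy the condition:
   0x8 stands for cc 0, 0x4 for cc 1, 0x2 for cc 2 and 0x1 for cc 3.
   The fast paths above branch directly on cc_src/cc_dst for well-known
   cc_op states; the generic fallback computes mask & (1 << (3 - cc)) and
   jumps to the skip label when that is 0, i.e. when the condition does
   not hold.  Hypothetical example: mask 0x6 (cc 1 or cc 2) with cc = 2
   gives 0x6 & (1 << 1) = 2, so the skip branch is not taken and the
   conditional code runs. */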
1198 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1199 uint64_t offset)
1201 int skip;
1203 if (mask == 0xf) {
1204 /* unconditional */
1205 tcg_gen_mov_i64(psw_addr, target);
1206 tcg_gen_exit_tb(0);
1207 } else if (mask == 0) {
1208 /* ignore cc and never match */
1209 gen_goto_tb(s, 0, offset + 2);
1210 } else {
1211 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1213 tcg_gen_mov_i64(new_addr, target);
1214 skip = gen_new_label();
1215 gen_jcc(s, mask, skip);
1216 tcg_gen_mov_i64(psw_addr, new_addr);
1217 tcg_temp_free_i64(new_addr);
1218 tcg_gen_exit_tb(0);
1219 gen_set_label(skip);
1221 gen_goto_tb(s, 1, offset + 2);
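/* Branches to a register target (BCR and friends): mask 0xf is an
   unconditional jump to the target address, mask 0 never branches and
   just falls through to the next instruction at offset + 2 (the RR form
   is 2 bytes long), and anything else emits the condition test via
   gen_jcc() with a skip label around the taken path. */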
1225 static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
1227 int skip;
1229 if (mask == 0xf) {
1230 /* unconditional */
1231 gen_goto_tb(s, 0, s->pc + offset);
1232 } else if (mask == 0) {
1233 /* ignore cc and never match */
1234 gen_goto_tb(s, 0, s->pc + 4);
1235 } else {
1236 skip = gen_new_label();
1237 gen_jcc(s, mask, skip);
1238 gen_goto_tb(s, 0, s->pc + offset);
1239 gen_set_label(skip);
1240 gen_goto_tb(s, 1, s->pc + 4);
1242 s->is_jmp = DISAS_TB_JUMP;
1245 static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1247 TCGv_i64 tmp, tmp2;
1248 int i;
1249 int l_memset = gen_new_label();
1250 int l_out = gen_new_label();
1251 TCGv_i64 dest = tcg_temp_local_new_i64();
1252 TCGv_i64 src = tcg_temp_local_new_i64();
1253 TCGv_i32 vl;
1255 /* Find out if we should use the inline version of mvc */
1256 switch (l) {
1257 case 0:
1258 case 1:
1259 case 2:
1260 case 3:
1261 case 4:
1262 case 5:
1263 case 6:
1264 case 7:
1265 case 11:
1266 case 15:
1267 /* use inline */
1268 break;
1269 default:
1270 /* Fall back to helper */
1271 vl = tcg_const_i32(l);
1272 potential_page_fault(s);
1273 gen_helper_mvc(vl, s1, s2);
1274 tcg_temp_free_i32(vl);
1275 return;
1278 tcg_gen_mov_i64(dest, s1);
1279 tcg_gen_mov_i64(src, s2);
1281 if (!(s->tb->flags & FLAG_MASK_64)) {
1282 /* XXX what if we overflow while moving? */
1283 tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
1284 tcg_gen_andi_i64(src, src, 0x7fffffffUL);
1287 tmp = tcg_temp_new_i64();
1288 tcg_gen_addi_i64(tmp, src, 1);
1289 tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
1290 tcg_temp_free_i64(tmp);
1292 switch (l) {
1293 case 0:
1294 tmp = tcg_temp_new_i64();
1296 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1297 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1299 tcg_temp_free_i64(tmp);
1300 break;
1301 case 1:
1302 tmp = tcg_temp_new_i64();
1304 tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
1305 tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));
1307 tcg_temp_free_i64(tmp);
1308 break;
1309 case 3:
1310 tmp = tcg_temp_new_i64();
1312 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1313 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1315 tcg_temp_free_i64(tmp);
1316 break;
1317 case 4:
1318 tmp = tcg_temp_new_i64();
1319 tmp2 = tcg_temp_new_i64();
1321 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1322 tcg_gen_addi_i64(src, src, 4);
1323 tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
1324 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1325 tcg_gen_addi_i64(dest, dest, 4);
1326 tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));
1328 tcg_temp_free_i64(tmp);
1329 tcg_temp_free_i64(tmp2);
1330 break;
1331 case 7:
1332 tmp = tcg_temp_new_i64();
1334 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1335 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1337 tcg_temp_free_i64(tmp);
1338 break;
1339 default:
1340 /* The inline version can become too big for awkward lengths; only
1341 use it for the known-good lengths selected above */
1342 tmp = tcg_temp_new_i64();
1343 tmp2 = tcg_const_i64(8);
1344 for (i = 0; (i + 7) <= l; i += 8) {
1345 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1346 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1348 tcg_gen_add_i64(src, src, tmp2);
1349 tcg_gen_add_i64(dest, dest, tmp2);
1352 tcg_temp_free_i64(tmp2);
1353 tmp2 = tcg_const_i64(1);
1355 for (; i <= l; i++) {
1356 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1357 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1359 tcg_gen_add_i64(src, src, tmp2);
1360 tcg_gen_add_i64(dest, dest, tmp2);
1363 tcg_temp_free_i64(tmp2);
1364 tcg_temp_free_i64(tmp);
1365 break;
1368 tcg_gen_br(l_out);
1370 gen_set_label(l_memset);
1371 /* memset case (dest == (src + 1)) */
1373 tmp = tcg_temp_new_i64();
1374 tmp2 = tcg_temp_new_i64();
1375 /* fill tmp with the byte */
1376 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1377 tcg_gen_shli_i64(tmp2, tmp, 8);
1378 tcg_gen_or_i64(tmp, tmp, tmp2);
1379 tcg_gen_shli_i64(tmp2, tmp, 16);
1380 tcg_gen_or_i64(tmp, tmp, tmp2);
1381 tcg_gen_shli_i64(tmp2, tmp, 32);
1382 tcg_gen_or_i64(tmp, tmp, tmp2);
1383 tcg_temp_free_i64(tmp2);
1385 tmp2 = tcg_const_i64(8);
1387 for (i = 0; (i + 7) <= l; i += 8) {
1388 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1389 tcg_gen_addi_i64(dest, dest, 8);
1392 tcg_temp_free_i64(tmp2);
1393 tmp2 = tcg_const_i64(1);
1395 for (; i <= l; i++) {
1396 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1397 tcg_gen_addi_i64(dest, dest, 1);
1400 tcg_temp_free_i64(tmp2);
1401 tcg_temp_free_i64(tmp);
1403 gen_set_label(l_out);
1405 tcg_temp_free(dest);
1406 tcg_temp_free(src);
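/* MVC with overlapping operands where dest == src + 1 is the classic
   memset idiom: the single byte at src is propagated through the whole
   destination.  The memset path builds an 8-byte pattern out of that byte
   (successive shift/or by 8, 16 and 32 bits) and stores it in 8-byte
   chunks, finishing with single-byte stores for the remainder.  Lengths
   other than 0-7, 11 and 15 were already routed to the mvc helper at the
   top of the function. */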
1409 static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1411 TCGv_i64 tmp;
1412 TCGv_i64 tmp2;
1413 TCGv_i32 vl;
1415 /* check for simple 32bit or 64bit match */
1416 switch (l) {
1417 case 0:
1418 tmp = tcg_temp_new_i64();
1419 tmp2 = tcg_temp_new_i64();
1421 tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
1422 tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
1423 cmp_u64(s, tmp, tmp2);
1425 tcg_temp_free_i64(tmp);
1426 tcg_temp_free_i64(tmp2);
1427 return;
1428 case 1:
1429 tmp = tcg_temp_new_i64();
1430 tmp2 = tcg_temp_new_i64();
1432 tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
1433 tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
1434 cmp_u64(s, tmp, tmp2);
1436 tcg_temp_free_i64(tmp);
1437 tcg_temp_free_i64(tmp2);
1438 return;
1439 case 3:
1440 tmp = tcg_temp_new_i64();
1441 tmp2 = tcg_temp_new_i64();
1443 tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
1444 tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
1445 cmp_u64(s, tmp, tmp2);
1447 tcg_temp_free_i64(tmp);
1448 tcg_temp_free_i64(tmp2);
1449 return;
1450 case 7:
1451 tmp = tcg_temp_new_i64();
1452 tmp2 = tcg_temp_new_i64();
1454 tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
1455 tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
1456 cmp_u64(s, tmp, tmp2);
1458 tcg_temp_free_i64(tmp);
1459 tcg_temp_free_i64(tmp2);
1460 return;
1463 potential_page_fault(s);
1464 vl = tcg_const_i32(l);
1465 gen_helper_clc(cc_op, vl, s1, s2);
1466 tcg_temp_free_i32(vl);
1467 set_cc_static(s);
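/* CLC operand lengths of 1, 2, 4 and 8 bytes (l = 0, 1, 3, 7) are compared
   inline as unsigned loads feeding the lazy cc machinery; every other
   length falls back to the clc helper, which computes the condition code
   itself, so set_cc_static() marks it as already resolved. */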
1470 static void disas_e3(DisasContext* s, int op, int r1, int x2, int b2, int d2)
1472 TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
1473 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
1475 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1476 op, r1, x2, b2, d2);
1477 addr = get_address(s, x2, b2, d2);
1478 switch (op) {
1479 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1480 case 0x4: /* lg r1,d2(x2,b2) */
1481 tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
1482 if (op == 0x2) {
1483 set_cc_s64(s, regs[r1]);
1485 break;
1486 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1487 tmp2 = tcg_temp_new_i64();
1488 tmp32_1 = tcg_temp_new_i32();
1489 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1490 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1491 store_reg32(r1, tmp32_1);
1492 set_cc_s32(s, tmp32_1);
1493 tcg_temp_free_i64(tmp2);
1494 tcg_temp_free_i32(tmp32_1);
1495 break;
1496 case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
1497 case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
1498 tmp2 = tcg_temp_new_i64();
1499 if (op == 0xc) {
1500 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1501 } else {
1502 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1504 tcg_gen_mul_i64(regs[r1], regs[r1], tmp2);
1505 tcg_temp_free_i64(tmp2);
1506 break;
1507 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1508 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1509 tmp2 = tcg_temp_new_i64();
1510 if (op == 0x1d) {
1511 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1512 } else {
1513 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1515 tmp4 = load_reg(r1 + 1);
1516 tmp3 = tcg_temp_new_i64();
1517 tcg_gen_div_i64(tmp3, tmp4, tmp2);
1518 store_reg(r1 + 1, tmp3);
1519 tcg_gen_rem_i64(tmp3, tmp4, tmp2);
1520 store_reg(r1, tmp3);
1521 tcg_temp_free_i64(tmp2);
1522 tcg_temp_free_i64(tmp3);
1523 tcg_temp_free_i64(tmp4);
1524 break;
1525 case 0x8: /* AG R1,D2(X2,B2) [RXY] */
1526 case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
1527 case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
1528 case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
1529 if (op == 0x1a) {
1530 tmp2 = tcg_temp_new_i64();
1531 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1532 } else if (op == 0x18) {
1533 tmp2 = tcg_temp_new_i64();
1534 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1535 } else {
1536 tmp2 = tcg_temp_new_i64();
1537 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1539 tmp4 = load_reg(r1);
1540 tmp3 = tcg_temp_new_i64();
1541 tcg_gen_add_i64(tmp3, tmp4, tmp2);
1542 store_reg(r1, tmp3);
1543 switch (op) {
1544 case 0x8:
1545 case 0x18:
1546 set_cc_add64(s, tmp4, tmp2, tmp3);
1547 break;
1548 case 0xa:
1549 case 0x1a:
1550 set_cc_addu64(s, tmp4, tmp2, tmp3);
1551 break;
1552 default:
1553 tcg_abort();
1555 tcg_temp_free_i64(tmp2);
1556 tcg_temp_free_i64(tmp3);
1557 tcg_temp_free_i64(tmp4);
1558 break;
1559 case 0x9: /* SG R1,D2(X2,B2) [RXY] */
1560 case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
1561 case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
1562 case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
1563 tmp2 = tcg_temp_new_i64();
1564 if (op == 0x19) {
1565 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1566 } else if (op == 0x1b) {
1567 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1568 } else {
1569 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1571 tmp4 = load_reg(r1);
1572 tmp3 = tcg_temp_new_i64();
1573 tcg_gen_sub_i64(tmp3, tmp4, tmp2);
1574 store_reg(r1, tmp3);
1575 switch (op) {
1576 case 0x9:
1577 case 0x19:
1578 set_cc_sub64(s, tmp4, tmp2, tmp3);
1579 break;
1580 case 0xb:
1581 case 0x1b:
1582 set_cc_subu64(s, tmp4, tmp2, tmp3);
1583 break;
1584 default:
1585 tcg_abort();
1587 tcg_temp_free_i64(tmp2);
1588 tcg_temp_free_i64(tmp3);
1589 tcg_temp_free_i64(tmp4);
1590 break;
1591 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1592 tmp2 = tcg_temp_new_i64();
1593 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1594 tcg_gen_bswap64_i64(tmp2, tmp2);
1595 store_reg(r1, tmp2);
1596 tcg_temp_free_i64(tmp2);
1597 break;
1598 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1599 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1600 tmp2 = tcg_temp_new_i64();
1601 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1602 if (op == 0x14) {
1603 tcg_gen_ext32s_i64(tmp2, tmp2);
1605 store_reg(r1, tmp2);
1606 tcg_temp_free_i64(tmp2);
1607 break;
1608 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1609 tmp2 = tcg_temp_new_i64();
1610 tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
1611 store_reg(r1, tmp2);
1612 tcg_temp_free_i64(tmp2);
1613 break;
1614 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1615 tmp2 = tcg_temp_new_i64();
1616 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1617 tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
1618 store_reg(r1, tmp2);
1619 tcg_temp_free_i64(tmp2);
1620 break;
1621 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1622 tmp2 = tcg_temp_new_i64();
1623 tmp32_1 = tcg_temp_new_i32();
1624 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1625 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1626 tcg_temp_free_i64(tmp2);
1627 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
1628 store_reg32(r1, tmp32_1);
1629 tcg_temp_free_i32(tmp32_1);
1630 break;
1631 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1632 tmp2 = tcg_temp_new_i64();
1633 tmp32_1 = tcg_temp_new_i32();
1634 tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
1635 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1636 tcg_temp_free_i64(tmp2);
1637 tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
1638 store_reg16(r1, tmp32_1);
1639 tcg_temp_free_i32(tmp32_1);
1640 break;
1641 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1642 case 0x21: /* CLG R1,D2(X2,B2) */
1643 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1644 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1645 tmp2 = tcg_temp_new_i64();
1646 switch (op) {
1647 case 0x20:
1648 case 0x21:
1649 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1650 break;
1651 case 0x30:
1652 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1653 break;
1654 case 0x31:
1655 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1656 break;
1657 default:
1658 tcg_abort();
1660 switch (op) {
1661 case 0x20:
1662 case 0x30:
1663 cmp_s64(s, regs[r1], tmp2);
1664 break;
1665 case 0x21:
1666 case 0x31:
1667 cmp_u64(s, regs[r1], tmp2);
1668 break;
1669 default:
1670 tcg_abort();
1672 tcg_temp_free_i64(tmp2);
1673 break;
1674 case 0x24: /* stg r1, d2(x2,b2) */
1675 tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
1676 break;
1677 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1678 tmp32_1 = load_reg32(r1);
1679 tmp2 = tcg_temp_new_i64();
1680 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
1681 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
1682 tcg_temp_free_i32(tmp32_1);
1683 tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
1684 tcg_temp_free_i64(tmp2);
1685 break;
1686 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1687 tmp32_1 = load_reg32(r1);
1688 tmp2 = tcg_temp_new_i64();
1689 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
1690 tcg_temp_free_i32(tmp32_1);
1691 tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
1692 tcg_temp_free_i64(tmp2);
1693 break;
1694 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1695 tmp32_1 = load_reg32(r1);
1696 tmp32_2 = tcg_temp_new_i32();
1697 tmp2 = tcg_temp_new_i64();
1698 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1699 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1700 tcg_temp_free_i64(tmp2);
1701 tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
1702 store_reg32(r1, tmp32_2);
1703 set_cc_nz_u32(s, tmp32_2);
1704 tcg_temp_free_i32(tmp32_1);
1705 tcg_temp_free_i32(tmp32_2);
1706 break;
1707 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1708 tmp3 = tcg_temp_new_i64();
1709 tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
1710 store_reg32_i64(r1, tmp3);
1711 tcg_temp_free_i64(tmp3);
1712 break;
1713 case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
1714 case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
1715 tmp32_1 = load_reg32(r1);
1716 tmp32_2 = tcg_temp_new_i32();
1717 tmp32_3 = tcg_temp_new_i32();
1718 tmp2 = tcg_temp_new_i64();
1719 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1720 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1721 tcg_temp_free_i64(tmp2);
1722 switch (op) {
1723 case 0x5a:
1724 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
1725 break;
1726 case 0x5b:
1727 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
1728 break;
1729 default:
1730 tcg_abort();
1732 store_reg32(r1, tmp32_3);
1733 switch (op) {
1734 case 0x5a:
1735 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
1736 break;
1737 case 0x5b:
1738 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
1739 break;
1740 default:
1741 tcg_abort();
1743 tcg_temp_free_i32(tmp32_1);
1744 tcg_temp_free_i32(tmp32_2);
1745 tcg_temp_free_i32(tmp32_3);
1746 break;
1747 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1748 store_reg(r1, addr);
1749 break;
1750 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1751 tmp32_1 = load_reg32(r1);
1752 tmp2 = tcg_temp_new_i64();
1753 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
1754 tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
1755 tcg_temp_free_i32(tmp32_1);
1756 tcg_temp_free_i64(tmp2);
1757 break;
1758 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1759 tmp3 = tcg_temp_new_i64();
1760 tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
1761 store_reg8(r1, tmp3);
1762 tcg_temp_free_i64(tmp3);
1763 break;
1764 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1765 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1766 tmp2 = tcg_temp_new_i64();
1767 tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
1768 switch (op) {
1769 case 0x76:
1770 tcg_gen_ext8s_i64(tmp2, tmp2);
1771 store_reg32_i64(r1, tmp2);
1772 break;
1773 case 0x77:
1774 tcg_gen_ext8s_i64(tmp2, tmp2);
1775 store_reg(r1, tmp2);
1776 break;
1777 default:
1778 tcg_abort();
1780 tcg_temp_free_i64(tmp2);
1781 break;
1782 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1783 tmp2 = tcg_temp_new_i64();
1784 tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
1785 store_reg32_i64(r1, tmp2);
1786 tcg_temp_free_i64(tmp2);
1787 break;
1788 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1789 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1790 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1791 tmp3 = tcg_temp_new_i64();
1792 tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
1793 switch (op) {
1794 case 0x80:
1795 tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
1796 break;
1797 case 0x81:
1798 tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
1799 break;
1800 case 0x82:
1801 tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
1802 break;
1803 default:
1804 tcg_abort();
1806 set_cc_nz_u64(s, regs[r1]);
1807 tcg_temp_free_i64(tmp3);
1808 break;
1809 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1810 tmp2 = tcg_temp_new_i64();
1811 tmp32_1 = tcg_const_i32(r1);
1812 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1813 gen_helper_mlg(tmp32_1, tmp2);
1814 tcg_temp_free_i64(tmp2);
1815 tcg_temp_free_i32(tmp32_1);
1816 break;
1817 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1818 tmp2 = tcg_temp_new_i64();
1819 tmp32_1 = tcg_const_i32(r1);
1820 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1821 gen_helper_dlg(tmp32_1, tmp2);
1822 tcg_temp_free_i64(tmp2);
1823 tcg_temp_free_i32(tmp32_1);
1824 break;
1825 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1826 tmp2 = tcg_temp_new_i64();
1827 tmp3 = tcg_temp_new_i64();
1828 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1829 /* XXX possible optimization point */
1830 gen_op_calc_cc(s);
1831 tcg_gen_extu_i32_i64(tmp3, cc_op);
1832 tcg_gen_shri_i64(tmp3, tmp3, 1);
1833 tcg_gen_andi_i64(tmp3, tmp3, 1);
1834 tcg_gen_add_i64(tmp3, tmp2, tmp3);
1835 tcg_gen_add_i64(tmp3, regs[r1], tmp3);
1836 store_reg(r1, tmp3);
1837 set_cc_addu64(s, regs[r1], tmp2, tmp3);
1838 tcg_temp_free_i64(tmp2);
1839 tcg_temp_free_i64(tmp3);
1840 break;
1841 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1842 tmp2 = tcg_temp_new_i64();
1843 tmp32_1 = tcg_const_i32(r1);
1844 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1845 /* XXX possible optimization point */
1846 gen_op_calc_cc(s);
1847 gen_helper_slbg(cc_op, cc_op, tmp32_1, regs[r1], tmp2);
1848 set_cc_static(s);
1849 tcg_temp_free_i64(tmp2);
1850 tcg_temp_free_i32(tmp32_1);
1851 break;
1852 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1853 tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
1854 break;
1855 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1856 tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
1857 break;
1858 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1859 tmp2 = tcg_temp_new_i64();
1860 tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
1861 store_reg32_i64(r1, tmp2);
1862 tcg_temp_free_i64(tmp2);
1863 break;
1864 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1865 tmp2 = tcg_temp_new_i64();
1866 tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
1867 store_reg32_i64(r1, tmp2);
1868 tcg_temp_free_i64(tmp2);
1869 break;
1870 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1871 tmp2 = tcg_temp_new_i64();
1872 tmp3 = load_reg((r1 + 1) & 15);
1873 tcg_gen_ext32u_i64(tmp3, tmp3);
1874 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1875 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
1876 store_reg32_i64((r1 + 1) & 15, tmp2);
1877 tcg_gen_shri_i64(tmp2, tmp2, 32);
1878 store_reg32_i64(r1, tmp2);
1879 tcg_temp_free_i64(tmp2);
1880 tcg_temp_free_i64(tmp3);
1881 break;
1882 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1883 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1884 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1885 tmp = load_reg(r1);
1886 tmp2 = tcg_temp_new_i64();
1887 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1888 tmp3 = load_reg((r1 + 1) & 15);
1889 tcg_gen_ext32u_i64(tmp2, tmp2);
1890 tcg_gen_ext32u_i64(tmp3, tmp3);
1891 tcg_gen_shli_i64(tmp, tmp, 32);
1892 tcg_gen_or_i64(tmp, tmp, tmp3);
1894 tcg_gen_rem_i64(tmp3, tmp, tmp2);
1895 tcg_gen_div_i64(tmp, tmp, tmp2);
1896 store_reg32_i64((r1 + 1) & 15, tmp);
1897 store_reg32_i64(r1, tmp3);
1898 tcg_temp_free_i64(tmp);
1899 tcg_temp_free_i64(tmp2);
1900 tcg_temp_free_i64(tmp3);
1901 break;
1902 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1903 tmp2 = tcg_temp_new_i64();
1904 tmp32_1 = load_reg32(r1);
1905 tmp32_2 = tcg_temp_new_i32();
1906 tmp32_3 = tcg_temp_new_i32();
1907 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1908 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1909 /* XXX possible optimization point */
1910 gen_op_calc_cc(s);
1911 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
1912 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
1913 store_reg32(r1, tmp32_3);
1914 tcg_temp_free_i64(tmp2);
1915 tcg_temp_free_i32(tmp32_1);
1916 tcg_temp_free_i32(tmp32_2);
1917 tcg_temp_free_i32(tmp32_3);
1918 break;
1919 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1920 tmp2 = tcg_temp_new_i64();
1921 tmp32_1 = tcg_const_i32(r1);
1922 tmp32_2 = tcg_temp_new_i32();
1923 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1924 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1925 /* XXX possible optimization point */
1926 gen_op_calc_cc(s);
1927 gen_helper_slb(cc_op, cc_op, tmp32_1, tmp32_2);
1928 set_cc_static(s);
1929 tcg_temp_free_i64(tmp2);
1930 tcg_temp_free_i32(tmp32_1);
1931 tcg_temp_free_i32(tmp32_2);
1932 break;
1933 default:
1934 LOG_DISAS("illegal e3 operation 0x%x\n", op);
1935 gen_illegal_opcode(s, 3);
1936 break;
1938 tcg_temp_free_i64(addr);
1941 #ifndef CONFIG_USER_ONLY
1942 static void disas_e5(DisasContext* s, uint64_t insn)
1944 TCGv_i64 tmp, tmp2;
1945 int op = (insn >> 32) & 0xff;
1947 tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
1948 tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);
1950 LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
1951 switch (op) {
1952 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1953 /* Test Protection */
1954 potential_page_fault(s);
1955 gen_helper_tprot(cc_op, tmp, tmp2);
1956 set_cc_static(s);
1957 break;
1958 default:
1959 LOG_DISAS("illegal e5 operation 0x%x\n", op);
1960 gen_illegal_opcode(s, 3);
1961 break;
1964 tcg_temp_free_i64(tmp);
1965 tcg_temp_free_i64(tmp2);
1967 #endif
1969 static void disas_eb(DisasContext *s, int op, int r1, int r3, int b2, int d2)
1971 TCGv_i64 tmp, tmp2, tmp3, tmp4;
1972 TCGv_i32 tmp32_1, tmp32_2;
1973 int i, stm_len;
1974 int ilc = 3;
1976 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1977 op, r1, r3, b2, d2);
1978 switch (op) {
1979 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1980 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1981 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1982 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1983 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1984 if (b2) {
1985 tmp = get_address(s, 0, b2, d2);
1986 tcg_gen_andi_i64(tmp, tmp, 0x3f);
1987 } else {
1988 tmp = tcg_const_i64(d2 & 0x3f);
1990 switch (op) {
1991 case 0xc:
1992 tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
1993 break;
1994 case 0xd:
1995 tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
1996 break;
1997 case 0xa:
1998 tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
1999 break;
2000 case 0xb:
2001 tmp2 = tcg_temp_new_i64();
2002 tmp3 = tcg_temp_new_i64();
2003 gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
2004 tcg_gen_shl_i64(tmp2, regs[r3], tmp);
2005 /* override sign bit with source sign */
2006 tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
2007 tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
2008 tcg_gen_or_i64(regs[r1], tmp2, tmp3);
2009 tcg_temp_free_i64(tmp2);
2010 tcg_temp_free_i64(tmp3);
2011 break;
2012 case 0x1c:
2013 tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
2014 break;
2015 default:
2016 tcg_abort();
2017 break;
2019 if (op == 0xa) {
2020 set_cc_s64(s, regs[r1]);
2022 tcg_temp_free_i64(tmp);
2023 break;
2024 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
2025 if (b2) {
2026 tmp = get_address(s, 0, b2, d2);
2027 tcg_gen_andi_i64(tmp, tmp, 0x3f);
2028 } else {
2029 tmp = tcg_const_i64(d2 & 0x3f);
2031 tmp32_1 = tcg_temp_new_i32();
2032 tmp32_2 = load_reg32(r3);
2033 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
2034 switch (op) {
2035 case 0x1d:
2036 tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
2037 break;
2038 default:
2039 tcg_abort();
2040 break;
2042 store_reg32(r1, tmp32_1);
2043 tcg_temp_free_i64(tmp);
2044 tcg_temp_free_i32(tmp32_1);
2045 tcg_temp_free_i32(tmp32_2);
2046 break;
2047 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
2048 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
2049 stm_len = 8;
2050 goto do_mh;
2051 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
2052 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
2053 stm_len = 4;
2054 do_mh:
2055 /* Apparently, unrolling lmg/stmg of any size gains performance -
2056 even for very long ones... */
2057 tmp = get_address(s, 0, b2, d2);
2058 tmp3 = tcg_const_i64(stm_len);
2059 tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
2060 for (i = r1;; i = (i + 1) % 16) {
2061 switch (op) {
2062 case 0x4:
2063 tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
2064 break;
2065 case 0x96:
2066 tmp2 = tcg_temp_new_i64();
2067 #if HOST_LONG_BITS == 32
2068 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
2069 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
2070 #else
2071 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
2072 tcg_gen_shl_i64(tmp2, tmp2, tmp4);
2073 tcg_gen_ext32u_i64(regs[i], regs[i]);
2074 tcg_gen_or_i64(regs[i], regs[i], tmp2);
2075 #endif
2076 tcg_temp_free_i64(tmp2);
2077 break;
2078 case 0x24:
2079 tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
2080 break;
2081 case 0x26:
2082 tmp2 = tcg_temp_new_i64();
2083 tcg_gen_shr_i64(tmp2, regs[i], tmp4);
2084 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2085 tcg_temp_free_i64(tmp2);
2086 break;
2087 default:
2088 tcg_abort();
2090 if (i == r3) {
2091 break;
2093 tcg_gen_add_i64(tmp, tmp, tmp3);
2095 tcg_temp_free_i64(tmp);
2096 tcg_temp_free_i64(tmp4);
2097 break;
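/* LMG/STMG and the high-word variants are unrolled here rather than done
   in a helper: the register index walks from r1 to r3 with wraparound
   modulo 16, and the address in tmp advances by stm_len bytes (8 for the
   64-bit forms, 4 for the high-word forms) after each register. */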
2098 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
2099 tmp = get_address(s, 0, b2, d2);
2100 tmp32_1 = tcg_const_i32(r1);
2101 tmp32_2 = tcg_const_i32(r3);
2102 potential_page_fault(s);
2103 gen_helper_stcmh(tmp32_1, tmp, tmp32_2);
2104 tcg_temp_free_i64(tmp);
2105 tcg_temp_free_i32(tmp32_1);
2106 tcg_temp_free_i32(tmp32_2);
2107 break;
2108 #ifndef CONFIG_USER_ONLY
2109 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
2110 /* Load Control */
2111 check_privileged(s, ilc);
2112 tmp = get_address(s, 0, b2, d2);
2113 tmp32_1 = tcg_const_i32(r1);
2114 tmp32_2 = tcg_const_i32(r3);
2115 potential_page_fault(s);
2116 gen_helper_lctlg(tmp32_1, tmp, tmp32_2);
2117 tcg_temp_free_i64(tmp);
2118 tcg_temp_free_i32(tmp32_1);
2119 tcg_temp_free_i32(tmp32_2);
2120 break;
2121 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
2122 /* Store Control */
2123 check_privileged(s, ilc);
2124 tmp = get_address(s, 0, b2, d2);
2125 tmp32_1 = tcg_const_i32(r1);
2126 tmp32_2 = tcg_const_i32(r3);
2127 potential_page_fault(s);
2128 gen_helper_stctg(tmp32_1, tmp, tmp32_2);
2129 tcg_temp_free_i64(tmp);
2130 tcg_temp_free_i32(tmp32_1);
2131 tcg_temp_free_i32(tmp32_2);
2132 break;
2133 #endif
2134 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
2135 tmp = get_address(s, 0, b2, d2);
2136 tmp32_1 = tcg_const_i32(r1);
2137 tmp32_2 = tcg_const_i32(r3);
2138 potential_page_fault(s);
2139 /* XXX rewrite in tcg */
2140 gen_helper_csg(cc_op, tmp32_1, tmp, tmp32_2);
2141 set_cc_static(s);
2142 tcg_temp_free_i64(tmp);
2143 tcg_temp_free_i32(tmp32_1);
2144 tcg_temp_free_i32(tmp32_2);
2145 break;
2146 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
2147 tmp = get_address(s, 0, b2, d2);
2148 tmp32_1 = tcg_const_i32(r1);
2149 tmp32_2 = tcg_const_i32(r3);
2150 potential_page_fault(s);
2151 /* XXX rewrite in tcg */
2152 gen_helper_cdsg(cc_op, tmp32_1, tmp, tmp32_2);
2153 set_cc_static(s);
2154 tcg_temp_free_i64(tmp);
2155 tcg_temp_free_i32(tmp32_1);
2156 tcg_temp_free_i32(tmp32_2);
2157 break;
2158 case 0x51: /* TMY D1(B1),I2 [SIY] */
2159 tmp = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
2160 tmp2 = tcg_const_i64((r1 << 4) | r3);
2161 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
2162 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
2163 that incurs fewer conversions */
2164 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
2165 tcg_temp_free_i64(tmp);
2166 tcg_temp_free_i64(tmp2);
2167 break;
2168 case 0x52: /* MVIY D1(B1),I2 [SIY] */
2169 tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
2170 tmp2 = tcg_const_i64((r1 << 4) | r3);
2171 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
2172 tcg_temp_free_i64(tmp);
2173 tcg_temp_free_i64(tmp2);
2174 break;
2175 case 0x55: /* CLIY D1(B1),I2 [SIY] */
2176 tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
2177 tmp = tcg_temp_new_i64();
2178 tmp32_1 = tcg_temp_new_i32();
2179 tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
2180 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
2181 cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
2182 tcg_temp_free_i64(tmp);
2183 tcg_temp_free_i64(tmp3);
2184 tcg_temp_free_i32(tmp32_1);
2185 break;
2186 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
2187 tmp = get_address(s, 0, b2, d2);
2188 tmp32_1 = tcg_const_i32(r1);
2189 tmp32_2 = tcg_const_i32(r3);
2190 potential_page_fault(s);
2191 /* XXX split CC calculation out */
2192 gen_helper_icmh(cc_op, tmp32_1, tmp, tmp32_2);
2193 set_cc_static(s);
2194 tcg_temp_free_i64(tmp);
2195 tcg_temp_free_i32(tmp32_1);
2196 tcg_temp_free_i32(tmp32_2);
2197 break;
2198 default:
2199 LOG_DISAS("illegal eb operation 0x%x\n", op);
2200 gen_illegal_opcode(s, ilc);
2201 break;
2205 static void disas_ed(DisasContext *s, int op, int r1, int x2, int b2, int d2,
2206 int r1b)
2208 TCGv_i32 tmp_r1, tmp32;
2209 TCGv_i64 addr, tmp;
2210 addr = get_address(s, x2, b2, d2);
2211 tmp_r1 = tcg_const_i32(r1);
2212 switch (op) {
2213 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2214 potential_page_fault(s);
2215 gen_helper_lxdb(tmp_r1, addr);
2216 break;
2217 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2218 tmp = tcg_temp_new_i64();
2219 tmp32 = load_freg32(r1);
2220 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2221 set_cc_cmp_f32_i64(s, tmp32, tmp);
2222 tcg_temp_free_i64(tmp);
2223 tcg_temp_free_i32(tmp32);
2224 break;
2225 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2226 tmp = tcg_temp_new_i64();
2227 tmp32 = tcg_temp_new_i32();
2228 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2229 tcg_gen_trunc_i64_i32(tmp32, tmp);
2230 gen_helper_aeb(tmp_r1, tmp32);
2231 tcg_temp_free_i64(tmp);
2232 tcg_temp_free_i32(tmp32);
2234 tmp32 = load_freg32(r1);
2235 set_cc_nz_f32(s, tmp32);
2236 tcg_temp_free_i32(tmp32);
2237 break;
2238 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2239 tmp = tcg_temp_new_i64();
2240 tmp32 = tcg_temp_new_i32();
2241 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2242 tcg_gen_trunc_i64_i32(tmp32, tmp);
2243 gen_helper_seb(tmp_r1, tmp32);
2244 tcg_temp_free_i64(tmp);
2245 tcg_temp_free_i32(tmp32);
2247 tmp32 = load_freg32(r1);
2248 set_cc_nz_f32(s, tmp32);
2249 tcg_temp_free_i32(tmp32);
2250 break;
2251 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2252 tmp = tcg_temp_new_i64();
2253 tmp32 = tcg_temp_new_i32();
2254 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2255 tcg_gen_trunc_i64_i32(tmp32, tmp);
2256 gen_helper_deb(tmp_r1, tmp32);
2257 tcg_temp_free_i64(tmp);
2258 tcg_temp_free_i32(tmp32);
2259 break;
2260 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2261 potential_page_fault(s);
2262 gen_helper_tceb(cc_op, tmp_r1, addr);
2263 set_cc_static(s);
2264 break;
2265 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2266 potential_page_fault(s);
2267 gen_helper_tcdb(cc_op, tmp_r1, addr);
2268 set_cc_static(s);
2269 break;
2270 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2271 potential_page_fault(s);
2272 gen_helper_tcxb(cc_op, tmp_r1, addr);
2273 set_cc_static(s);
2274 break;
2275 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2276 tmp = tcg_temp_new_i64();
2277 tmp32 = tcg_temp_new_i32();
2278 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2279 tcg_gen_trunc_i64_i32(tmp32, tmp);
2280 gen_helper_meeb(tmp_r1, tmp32);
2281 tcg_temp_free_i64(tmp);
2282 tcg_temp_free_i32(tmp32);
2283 break;
2284 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2285 potential_page_fault(s);
2286 gen_helper_cdb(cc_op, tmp_r1, addr);
2287 set_cc_static(s);
2288 break;
2289 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2290 potential_page_fault(s);
2291 gen_helper_adb(cc_op, tmp_r1, addr);
2292 set_cc_static(s);
2293 break;
2294 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2295 potential_page_fault(s);
2296 gen_helper_sdb(cc_op, tmp_r1, addr);
2297 set_cc_static(s);
2298 break;
2299 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2300 potential_page_fault(s);
2301 gen_helper_mdb(tmp_r1, addr);
2302 break;
2303 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2304 potential_page_fault(s);
2305 gen_helper_ddb(tmp_r1, addr);
2306 break;
2307 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2308 /* for RXF insns, r1 is R3 and r1b is R1 */
2309 tmp32 = tcg_const_i32(r1b);
2310 potential_page_fault(s);
2311 gen_helper_madb(tmp32, addr, tmp_r1);
2312 tcg_temp_free_i32(tmp32);
2313 break;
2314 default:
2315 LOG_DISAS("illegal ed operation 0x%x\n", op);
2316 gen_illegal_opcode(s, 3);
2317 return;
2319 tcg_temp_free_i32(tmp_r1);
2320 tcg_temp_free_i64(addr);
2323 static void disas_a5(DisasContext *s, int op, int r1, int i2)
2325 TCGv_i64 tmp, tmp2;
2326 TCGv_i32 tmp32;
2327 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2328 switch (op) {
2329 case 0x0: /* IIHH R1,I2 [RI] */
2330 tmp = tcg_const_i64(i2);
2331 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
2332 break;
2333 case 0x1: /* IIHL R1,I2 [RI] */
2334 tmp = tcg_const_i64(i2);
2335 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
2336 break;
2337 case 0x2: /* IILH R1,I2 [RI] */
2338 tmp = tcg_const_i64(i2);
2339 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
2340 break;
2341 case 0x3: /* IILL R1,I2 [RI] */
2342 tmp = tcg_const_i64(i2);
2343 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
2344 break;
2345 case 0x4: /* NIHH R1,I2 [RI] */
2346 case 0x8: /* OIHH R1,I2 [RI] */
2347 tmp = load_reg(r1);
2348 tmp32 = tcg_temp_new_i32();
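/* AND/OR the 16-bit immediate into bits 0-15 of r1, then derive the cc
   from the resulting halfword (the N../O.. pairs below follow the same
   pattern for the other halfwords) */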
2349 switch (op) {
2350 case 0x4:
2351 tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
2352 | 0x0000ffffffffffffULL);
2353 tcg_gen_and_i64(tmp, tmp, tmp2);
2354 break;
2355 case 0x8:
2356 tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
2357 tcg_gen_or_i64(tmp, tmp, tmp2);
2358 break;
2359 default:
2360 tcg_abort();
2362 store_reg(r1, tmp);
2363 tcg_gen_shri_i64(tmp2, tmp, 48);
2364 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2365 set_cc_nz_u32(s, tmp32);
2366 tcg_temp_free_i64(tmp2);
2367 tcg_temp_free_i32(tmp32);
2368 break;
2369 case 0x5: /* NIHL R1,I2 [RI] */
2370 case 0x9: /* OIHL R1,I2 [RI] */
2371 tmp = load_reg(r1);
2372 tmp32 = tcg_temp_new_i32();
2373 switch (op) {
2374 case 0x5:
2375 tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
2376 | 0xffff0000ffffffffULL);
2377 tcg_gen_and_i64(tmp, tmp, tmp2);
2378 break;
2379 case 0x9:
2380 tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
2381 tcg_gen_or_i64(tmp, tmp, tmp2);
2382 break;
2383 default:
2384 tcg_abort();
2386 store_reg(r1, tmp);
2387 tcg_gen_shri_i64(tmp2, tmp, 32);
2388 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2389 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2390 set_cc_nz_u32(s, tmp32);
2391 tcg_temp_free_i64(tmp2);
2392 tcg_temp_free_i32(tmp32);
2393 break;
2394 case 0x6: /* NILH R1,I2 [RI] */
2395 case 0xa: /* OILH R1,I2 [RI] */
2396 tmp = load_reg(r1);
2397 tmp32 = tcg_temp_new_i32();
2398 switch (op) {
2399 case 0x6:
2400 tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
2401 | 0xffffffff0000ffffULL);
2402 tcg_gen_and_i64(tmp, tmp, tmp2);
2403 break;
2404 case 0xa:
2405 tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
2406 tcg_gen_or_i64(tmp, tmp, tmp2);
2407 break;
2408 default:
2409 tcg_abort();
2411 store_reg(r1, tmp);
2412 tcg_gen_shri_i64(tmp, tmp, 16);
2413 tcg_gen_trunc_i64_i32(tmp32, tmp);
2414 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2415 set_cc_nz_u32(s, tmp32);
2416 tcg_temp_free_i64(tmp2);
2417 tcg_temp_free_i32(tmp32);
2418 break;
2419 case 0x7: /* NILL R1,I2 [RI] */
2420 case 0xb: /* OILL R1,I2 [RI] */
2421 tmp = load_reg(r1);
2422 tmp32 = tcg_temp_new_i32();
2423 switch (op) {
2424 case 0x7:
2425 tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
2426 tcg_gen_and_i64(tmp, tmp, tmp2);
2427 break;
2428 case 0xb:
2429 tmp2 = tcg_const_i64(i2);
2430 tcg_gen_or_i64(tmp, tmp, tmp2);
2431 break;
2432 default:
2433 tcg_abort();
2435 store_reg(r1, tmp);
2436 tcg_gen_trunc_i64_i32(tmp32, tmp);
2437 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2438 set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
2439 tcg_temp_free_i64(tmp2);
2440 tcg_temp_free_i32(tmp32);
2441 break;
2442 case 0xc: /* LLIHH R1,I2 [RI] */
2443 tmp = tcg_const_i64(((uint64_t)i2) << 48);
2444 store_reg(r1, tmp);
2445 break;
2446 case 0xd: /* LLIHL R1,I2 [RI] */
2447 tmp = tcg_const_i64(((uint64_t)i2) << 32);
2448 store_reg(r1, tmp);
2449 break;
2450 case 0xe: /* LLILH R1,I2 [RI] */
2451 tmp = tcg_const_i64(((uint64_t)i2) << 16);
2452 store_reg(r1, tmp);
2453 break;
2454 case 0xf: /* LLILL R1,I2 [RI] */
2455 tmp = tcg_const_i64(i2);
2456 store_reg(r1, tmp);
2457 break;
2458 default:
2459 LOG_DISAS("illegal a5 operation 0x%x\n", op);
2460 gen_illegal_opcode(s, 2);
2461 return;
2463 tcg_temp_free_i64(tmp);
2466 static void disas_a7(DisasContext *s, int op, int r1, int i2)
2468 TCGv_i64 tmp, tmp2;
2469 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2470 int l1;
2472 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2473 switch (op) {
2474 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2475 case 0x1: /* TMLL or TML R1,I2 [RI] */
2476 case 0x2: /* TMHH R1,I2 [RI] */
2477 case 0x3: /* TMHL R1,I2 [RI] */
2478 tmp = load_reg(r1);
2479 tmp2 = tcg_const_i64((uint16_t)i2);
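/* shift the halfword selected by the opcode (TMLH/TMLL/TMHH/TMHL) down
   into the low 16 bits before the test-under-mask comparison */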
2480 switch (op) {
2481 case 0x0:
2482 tcg_gen_shri_i64(tmp, tmp, 16);
2483 break;
2484 case 0x1:
2485 break;
2486 case 0x2:
2487 tcg_gen_shri_i64(tmp, tmp, 48);
2488 break;
2489 case 0x3:
2490 tcg_gen_shri_i64(tmp, tmp, 32);
2491 break;
2493 tcg_gen_andi_i64(tmp, tmp, 0xffff);
2494 cmp_64(s, tmp, tmp2, CC_OP_TM_64);
2495 tcg_temp_free_i64(tmp);
2496 tcg_temp_free_i64(tmp2);
2497 break;
2498 case 0x4: /* brc m1, i2 */
2499 gen_brc(r1, s, i2 * 2LL);
2500 return;
2501 case 0x5: /* BRAS R1,I2 [RI] */
2502 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
2503 store_reg(r1, tmp);
2504 tcg_temp_free_i64(tmp);
2505 gen_goto_tb(s, 0, s->pc + i2 * 2LL);
2506 s->is_jmp = DISAS_TB_JUMP;
2507 break;
2508 case 0x6: /* BRCT R1,I2 [RI] */
2509 tmp32_1 = load_reg32(r1);
2510 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
2511 store_reg32(r1, tmp32_1);
2512 gen_update_cc_op(s);
2513 l1 = gen_new_label();
2514 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
2515 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2516 gen_set_label(l1);
2517 gen_goto_tb(s, 1, s->pc + 4);
2518 s->is_jmp = DISAS_TB_JUMP;
2519 tcg_temp_free_i32(tmp32_1);
2520 break;
2521 case 0x7: /* BRCTG R1,I2 [RI] */
2522 tmp = load_reg(r1);
2523 tcg_gen_subi_i64(tmp, tmp, 1);
2524 store_reg(r1, tmp);
2525 gen_update_cc_op(s);
2526 l1 = gen_new_label();
2527 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
2528 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2529 gen_set_label(l1);
2530 gen_goto_tb(s, 1, s->pc + 4);
2531 s->is_jmp = DISAS_TB_JUMP;
2532 tcg_temp_free_i64(tmp);
2533 break;
2534 case 0x8: /* lhi r1, i2 */
2535 tmp32_1 = tcg_const_i32(i2);
2536 store_reg32(r1, tmp32_1);
2537 tcg_temp_free_i32(tmp32_1);
2538 break;
2539 case 0x9: /* lghi r1, i2 */
2540 tmp = tcg_const_i64(i2);
2541 store_reg(r1, tmp);
2542 tcg_temp_free_i64(tmp);
2543 break;
2544 case 0xa: /* AHI R1,I2 [RI] */
2545 tmp32_1 = load_reg32(r1);
2546 tmp32_2 = tcg_temp_new_i32();
2547 tmp32_3 = tcg_const_i32(i2);
2549 if (i2 < 0) {
2550 tcg_gen_subi_i32(tmp32_2, tmp32_1, -i2);
2551 } else {
2552 tcg_gen_add_i32(tmp32_2, tmp32_1, tmp32_3);
2555 store_reg32(r1, tmp32_2);
2556 set_cc_add32(s, tmp32_1, tmp32_3, tmp32_2);
2557 tcg_temp_free_i32(tmp32_1);
2558 tcg_temp_free_i32(tmp32_2);
2559 tcg_temp_free_i32(tmp32_3);
2560 break;
2561 case 0xb: /* aghi r1, i2 */
2562 tmp = load_reg(r1);
2563 tmp2 = tcg_const_i64(i2);
2565 if (i2 < 0) {
2566 tcg_gen_subi_i64(regs[r1], tmp, -i2);
2567 } else {
2568 tcg_gen_add_i64(regs[r1], tmp, tmp2);
2570 set_cc_add64(s, tmp, tmp2, regs[r1]);
2571 tcg_temp_free_i64(tmp);
2572 tcg_temp_free_i64(tmp2);
2573 break;
2574 case 0xc: /* MHI R1,I2 [RI] */
2575 tmp32_1 = load_reg32(r1);
2576 tcg_gen_muli_i32(tmp32_1, tmp32_1, i2);
2577 store_reg32(r1, tmp32_1);
2578 tcg_temp_free_i32(tmp32_1);
2579 break;
2580 case 0xd: /* MGHI R1,I2 [RI] */
2581 tmp = load_reg(r1);
2582 tcg_gen_muli_i64(tmp, tmp, i2);
2583 store_reg(r1, tmp);
2584 tcg_temp_free_i64(tmp);
2585 break;
2586 case 0xe: /* CHI R1,I2 [RI] */
2587 tmp32_1 = load_reg32(r1);
2588 cmp_s32c(s, tmp32_1, i2);
2589 tcg_temp_free_i32(tmp32_1);
2590 break;
2591 case 0xf: /* CGHI R1,I2 [RI] */
2592 tmp = load_reg(r1);
2593 cmp_s64c(s, tmp, i2);
2594 tcg_temp_free_i64(tmp);
2595 break;
2596 default:
2597 LOG_DISAS("illegal a7 operation 0x%x\n", op);
2598 gen_illegal_opcode(s, 2);
2599 return;
2603 static void disas_b2(DisasContext *s, int op, uint32_t insn)
2605 TCGv_i64 tmp, tmp2, tmp3;
2606 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2607 int r1, r2;
2608 int ilc = 2;
2609 #ifndef CONFIG_USER_ONLY
2610 int r3, d2, b2;
2611 #endif
2613 r1 = (insn >> 4) & 0xf;
2614 r2 = insn & 0xf;
2616 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);
2618 switch (op) {
2619 case 0x22: /* IPM R1 [RRE] */
2620 tmp32_1 = tcg_const_i32(r1);
2621 gen_op_calc_cc(s);
2622 gen_helper_ipm(cc_op, tmp32_1);
2623 tcg_temp_free_i32(tmp32_1);
2624 break;
2625 case 0x41: /* CKSM R1,R2 [RRE] */
2626 tmp32_1 = tcg_const_i32(r1);
2627 tmp32_2 = tcg_const_i32(r2);
2628 potential_page_fault(s);
2629 gen_helper_cksm(tmp32_1, tmp32_2);
2630 tcg_temp_free_i32(tmp32_1);
2631 tcg_temp_free_i32(tmp32_2);
2632 gen_op_movi_cc(s, 0);
2633 break;
2634 case 0x4e: /* SAR R1,R2 [RRE] */
2635 tmp32_1 = load_reg32(r2);
2636 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, aregs[r1]));
2637 tcg_temp_free_i32(tmp32_1);
2638 break;
2639 case 0x4f: /* EAR R1,R2 [RRE] */
2640 tmp32_1 = tcg_temp_new_i32();
2641 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, aregs[r2]));
2642 store_reg32(r1, tmp32_1);
2643 tcg_temp_free_i32(tmp32_1);
2644 break;
2645 case 0x52: /* MSR R1,R2 [RRE] */
2646 tmp32_1 = load_reg32(r1);
2647 tmp32_2 = load_reg32(r2);
2648 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
2649 store_reg32(r1, tmp32_1);
2650 tcg_temp_free_i32(tmp32_1);
2651 tcg_temp_free_i32(tmp32_2);
2652 break;
2653 case 0x54: /* MVPG R1,R2 [RRE] */
2654 tmp = load_reg(0);
2655 tmp2 = load_reg(r1);
2656 tmp3 = load_reg(r2);
2657 potential_page_fault(s);
2658 gen_helper_mvpg(tmp, tmp2, tmp3);
2659 tcg_temp_free_i64(tmp);
2660 tcg_temp_free_i64(tmp2);
2661 tcg_temp_free_i64(tmp3);
2662 /* XXX check CCO bit and set CC accordingly */
2663 gen_op_movi_cc(s, 0);
2664 break;
2665 case 0x55: /* MVST R1,R2 [RRE] */
2666 tmp32_1 = load_reg32(0);
2667 tmp32_2 = tcg_const_i32(r1);
2668 tmp32_3 = tcg_const_i32(r2);
2669 potential_page_fault(s);
2670 gen_helper_mvst(tmp32_1, tmp32_2, tmp32_3);
2671 tcg_temp_free_i32(tmp32_1);
2672 tcg_temp_free_i32(tmp32_2);
2673 tcg_temp_free_i32(tmp32_3);
2674 gen_op_movi_cc(s, 1);
2675 break;
2676 case 0x5d: /* CLST R1,R2 [RRE] */
2677 tmp32_1 = load_reg32(0);
2678 tmp32_2 = tcg_const_i32(r1);
2679 tmp32_3 = tcg_const_i32(r2);
2680 potential_page_fault(s);
2681 gen_helper_clst(cc_op, tmp32_1, tmp32_2, tmp32_3);
2682 set_cc_static(s);
2683 tcg_temp_free_i32(tmp32_1);
2684 tcg_temp_free_i32(tmp32_2);
2685 tcg_temp_free_i32(tmp32_3);
2686 break;
2687 case 0x5e: /* SRST R1,R2 [RRE] */
2688 tmp32_1 = load_reg32(0);
2689 tmp32_2 = tcg_const_i32(r1);
2690 tmp32_3 = tcg_const_i32(r2);
2691 potential_page_fault(s);
2692 gen_helper_srst(cc_op, tmp32_1, tmp32_2, tmp32_3);
2693 set_cc_static(s);
2694 tcg_temp_free_i32(tmp32_1);
2695 tcg_temp_free_i32(tmp32_2);
2696 tcg_temp_free_i32(tmp32_3);
2697 break;
2699 #ifndef CONFIG_USER_ONLY
2700 case 0x02: /* STIDP D2(B2) [S] */
2701 /* Store CPU ID */
2702 check_privileged(s, ilc);
2703 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2704 tmp = get_address(s, 0, b2, d2);
2705 potential_page_fault(s);
2706 gen_helper_stidp(tmp);
2707 tcg_temp_free_i64(tmp);
2708 break;
2709 case 0x04: /* SCK D2(B2) [S] */
2710 /* Set Clock */
2711 check_privileged(s, ilc);
2712 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2713 tmp = get_address(s, 0, b2, d2);
2714 potential_page_fault(s);
2715 gen_helper_sck(cc_op, tmp);
2716 set_cc_static(s);
2717 tcg_temp_free_i64(tmp);
2718 break;
2719 case 0x05: /* STCK D2(B2) [S] */
2720 /* Store Clock */
2721 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2722 tmp = get_address(s, 0, b2, d2);
2723 potential_page_fault(s);
2724 gen_helper_stck(cc_op, tmp);
2725 set_cc_static(s);
2726 tcg_temp_free_i64(tmp);
2727 break;
2728 case 0x06: /* SCKC D2(B2) [S] */
2729 /* Set Clock Comparator */
2730 check_privileged(s, ilc);
2731 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2732 tmp = get_address(s, 0, b2, d2);
2733 potential_page_fault(s);
2734 gen_helper_sckc(tmp);
2735 tcg_temp_free_i64(tmp);
2736 break;
2737 case 0x07: /* STCKC D2(B2) [S] */
2738 /* Store Clock Comparator */
2739 check_privileged(s, ilc);
2740 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2741 tmp = get_address(s, 0, b2, d2);
2742 potential_page_fault(s);
2743 gen_helper_stckc(tmp);
2744 tcg_temp_free_i64(tmp);
2745 break;
2746 case 0x08: /* SPT D2(B2) [S] */
2747 /* Set CPU Timer */
2748 check_privileged(s, ilc);
2749 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2750 tmp = get_address(s, 0, b2, d2);
2751 potential_page_fault(s);
2752 gen_helper_spt(tmp);
2753 tcg_temp_free_i64(tmp);
2754 break;
2755 case 0x09: /* STPT D2(B2) [S] */
2756 /* Store CPU Timer */
2757 check_privileged(s, ilc);
2758 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2759 tmp = get_address(s, 0, b2, d2);
2760 potential_page_fault(s);
2761 gen_helper_stpt(tmp);
2762 tcg_temp_free_i64(tmp);
2763 break;
2764 case 0x0a: /* SPKA D2(B2) [S] */
2765 /* Set PSW Key from Address */
2766 check_privileged(s, ilc);
2767 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2768 tmp = get_address(s, 0, b2, d2);
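/* the new PSW key is taken from bits 56-59 of the computed address */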
2769 tmp2 = tcg_temp_new_i64();
2770 tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
2771 tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
2772 tcg_gen_or_i64(psw_mask, tmp2, tmp);
2773 tcg_temp_free_i64(tmp2);
2774 tcg_temp_free_i64(tmp);
2775 break;
2776 case 0x0d: /* PTLB [S] */
2777 /* Purge TLB */
2778 check_privileged(s, ilc);
2779 gen_helper_ptlb();
2780 break;
2781 case 0x10: /* SPX D2(B2) [S] */
2782 /* Set Prefix Register */
2783 check_privileged(s, ilc);
2784 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2785 tmp = get_address(s, 0, b2, d2);
2786 potential_page_fault(s);
2787 gen_helper_spx(tmp);
2788 tcg_temp_free_i64(tmp);
2789 break;
2790 case 0x11: /* STPX D2(B2) [S] */
2791 /* Store Prefix */
2792 check_privileged(s, ilc);
2793 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2794 tmp = get_address(s, 0, b2, d2);
2795 tmp2 = tcg_temp_new_i64();
2796 tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUState, psa));
2797 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2798 tcg_temp_free_i64(tmp);
2799 tcg_temp_free_i64(tmp2);
2800 break;
2801 case 0x12: /* STAP D2(B2) [S] */
2802 /* Store CPU Address */
2803 check_privileged(s, ilc);
2804 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2805 tmp = get_address(s, 0, b2, d2);
2806 tmp2 = tcg_temp_new_i64();
2807 tmp32_1 = tcg_temp_new_i32();
2808 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, cpu_num));
2809 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
2810 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2811 tcg_temp_free_i64(tmp);
2812 tcg_temp_free_i64(tmp2);
2813 tcg_temp_free_i32(tmp32_1);
2814 break;
2815 case 0x21: /* IPTE R1,R2 [RRE] */
2816 /* Invalidate PTE */
2817 check_privileged(s, ilc);
2818 r1 = (insn >> 4) & 0xf;
2819 r2 = insn & 0xf;
2820 tmp = load_reg(r1);
2821 tmp2 = load_reg(r2);
2822 gen_helper_ipte(tmp, tmp2);
2823 tcg_temp_free_i64(tmp);
2824 tcg_temp_free_i64(tmp2);
2825 break;
2826 case 0x29: /* ISKE R1,R2 [RRE] */
2827 /* Insert Storage Key Extended */
2828 check_privileged(s, ilc);
2829 r1 = (insn >> 4) & 0xf;
2830 r2 = insn & 0xf;
2831 tmp = load_reg(r2);
2832 tmp2 = tcg_temp_new_i64();
2833 gen_helper_iske(tmp2, tmp);
2834 store_reg(r1, tmp2);
2835 tcg_temp_free_i64(tmp);
2836 tcg_temp_free_i64(tmp2);
2837 break;
2838 case 0x2a: /* RRBE R1,R2 [RRE] */
2839 /* Reset Reference Bit Extended */
2840 check_privileged(s, ilc);
2841 r1 = (insn >> 4) & 0xf;
2842 r2 = insn & 0xf;
2843 tmp32_1 = load_reg32(r1);
2844 tmp = load_reg(r2);
2845 gen_helper_rrbe(cc_op, tmp32_1, tmp);
2846 set_cc_static(s);
2847 tcg_temp_free_i32(tmp32_1);
2848 tcg_temp_free_i64(tmp);
2849 break;
2850 case 0x2b: /* SSKE R1,R2 [RRE] */
2851 /* Set Storage Key Extended */
2852 check_privileged(s, ilc);
2853 r1 = (insn >> 4) & 0xf;
2854 r2 = insn & 0xf;
2855 tmp32_1 = load_reg32(r1);
2856 tmp = load_reg(r2);
2857 gen_helper_sske(tmp32_1, tmp);
2858 tcg_temp_free_i32(tmp32_1);
2859 tcg_temp_free_i64(tmp);
2860 break;
2861 case 0x34: /* STSCH ? */
2862 /* Store Subchannel */
2863 check_privileged(s, ilc);
2864 gen_op_movi_cc(s, 3);
2865 break;
2866 case 0x46: /* STURA R1,R2 [RRE] */
2867 /* Store Using Real Address */
2868 check_privileged(s, ilc);
2869 r1 = (insn >> 4) & 0xf;
2870 r2 = insn & 0xf;
2871 tmp32_1 = load_reg32(r1);
2872 tmp = load_reg(r2);
2873 potential_page_fault(s);
2874 gen_helper_stura(tmp, tmp32_1);
2875 tcg_temp_free_i32(tmp32_1);
2876 tcg_temp_free_i64(tmp);
2877 break;
2878 case 0x50: /* CSP R1,R2 [RRE] */
2879 /* Compare And Swap And Purge */
2880 check_privileged(s, ilc);
2881 r1 = (insn >> 4) & 0xf;
2882 r2 = insn & 0xf;
2883 tmp32_1 = tcg_const_i32(r1);
2884 tmp32_2 = tcg_const_i32(r2);
2885 gen_helper_csp(cc_op, tmp32_1, tmp32_2);
2886 set_cc_static(s);
2887 tcg_temp_free_i32(tmp32_1);
2888 tcg_temp_free_i32(tmp32_2);
2889 break;
2890 case 0x5f: /* CHSC ? */
2891 /* Channel Subsystem Call */
2892 check_privileged(s, ilc);
2893 gen_op_movi_cc(s, 3);
2894 break;
2895 case 0x78: /* STCKE D2(B2) [S] */
2896 /* Store Clock Extended */
2897 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2898 tmp = get_address(s, 0, b2, d2);
2899 potential_page_fault(s);
2900 gen_helper_stcke(cc_op, tmp);
2901 set_cc_static(s);
2902 tcg_temp_free_i64(tmp);
2903 break;
2904 case 0x79: /* SACF D2(B2) [S] */
2905 /* Set Address Space Control Fast */
2906 check_privileged(s, ilc);
2907 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2908 tmp = get_address(s, 0, b2, d2);
2909 potential_page_fault(s);
2910 gen_helper_sacf(tmp);
2911 tcg_temp_free_i64(tmp);
2912 /* addressing mode has changed, so end the block */
2913 s->pc += ilc * 2;
2914 update_psw_addr(s);
2915 s->is_jmp = DISAS_EXCP;
2916 break;
2917 case 0x7d: /* STSI D2(B2) [S] */
2918 check_privileged(s, ilc);
2919 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2920 tmp = get_address(s, 0, b2, d2);
2921 tmp32_1 = load_reg32(0);
2922 tmp32_2 = load_reg32(1);
2923 potential_page_fault(s);
2924 gen_helper_stsi(cc_op, tmp, tmp32_1, tmp32_2);
2925 set_cc_static(s);
2926 tcg_temp_free_i64(tmp);
2927 tcg_temp_free_i32(tmp32_1);
2928 tcg_temp_free_i32(tmp32_2);
2929 break;
2930 case 0x9d: /* LFPC D2(B2) [S] */
2931 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2932 tmp = get_address(s, 0, b2, d2);
2933 tmp2 = tcg_temp_new_i64();
2934 tmp32_1 = tcg_temp_new_i32();
2935 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
2936 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
2937 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
2938 tcg_temp_free_i64(tmp);
2939 tcg_temp_free_i64(tmp2);
2940 tcg_temp_free_i32(tmp32_1);
2941 break;
2942 case 0xb1: /* STFL D2(B2) [S] */
2943 /* Store Facility List (CPU features) at 200 */
2944 check_privileged(s, ilc);
2945 tmp2 = tcg_const_i64(0xc0000000);
2946 tmp = tcg_const_i64(200);
2947 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2948 tcg_temp_free_i64(tmp2);
2949 tcg_temp_free_i64(tmp);
2950 break;
2951 case 0xb2: /* LPSWE D2(B2) [S] */
2952 /* Load PSW Extended */
2953 check_privileged(s, ilc);
2954 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2955 tmp = get_address(s, 0, b2, d2);
2956 tmp2 = tcg_temp_new_i64();
2957 tmp3 = tcg_temp_new_i64();
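/* the 16-byte operand holds the new PSW: mask in the first doubleword,
   address in the second */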
2958 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
2959 tcg_gen_addi_i64(tmp, tmp, 8);
2960 tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
2961 gen_helper_load_psw(tmp2, tmp3);
2962 /* we need to keep cc_op intact */
2963 s->is_jmp = DISAS_JUMP;
2964 tcg_temp_free_i64(tmp);
2965 break;
2966 case 0x20: /* SERVC R1,R2 [RRE] */
2967 /* SCLP Service call (PV hypercall) */
2968 check_privileged(s, ilc);
2969 potential_page_fault(s);
2970 tmp32_1 = load_reg32(r2);
2971 tmp = load_reg(r1);
2972 gen_helper_servc(cc_op, tmp32_1, tmp);
2973 set_cc_static(s);
2974 tcg_temp_free_i32(tmp32_1);
2975 tcg_temp_free_i64(tmp);
2976 break;
2977 #endif
2978 default:
2979 LOG_DISAS("illegal b2 operation 0x%x\n", op);
2980 gen_illegal_opcode(s, ilc);
2981 break;
2985 static void disas_b3(DisasContext *s, int op, int m3, int r1, int r2)
2987 TCGv_i64 tmp;
2988 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2989 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
2990 #define FP_HELPER(i) \
2991 tmp32_1 = tcg_const_i32(r1); \
2992 tmp32_2 = tcg_const_i32(r2); \
2993 gen_helper_ ## i (tmp32_1, tmp32_2); \
2994 tcg_temp_free_i32(tmp32_1); \
2995 tcg_temp_free_i32(tmp32_2);
2997 #define FP_HELPER_CC(i) \
2998 tmp32_1 = tcg_const_i32(r1); \
2999 tmp32_2 = tcg_const_i32(r2); \
3000 gen_helper_ ## i (cc_op, tmp32_1, tmp32_2); \
3001 set_cc_static(s); \
3002 tcg_temp_free_i32(tmp32_1); \
3003 tcg_temp_free_i32(tmp32_2);
3005 switch (op) {
3006 case 0x0: /* LPEBR R1,R2 [RRE] */
3007 FP_HELPER_CC(lpebr);
3008 break;
3009 case 0x2: /* LTEBR R1,R2 [RRE] */
3010 FP_HELPER_CC(ltebr);
3011 break;
3012 case 0x3: /* LCEBR R1,R2 [RRE] */
3013 FP_HELPER_CC(lcebr);
3014 break;
3015 case 0x4: /* LDEBR R1,R2 [RRE] */
3016 FP_HELPER(ldebr);
3017 break;
3018 case 0x5: /* LXDBR R1,R2 [RRE] */
3019 FP_HELPER(lxdbr);
3020 break;
3021 case 0x9: /* CEBR R1,R2 [RRE] */
3022 FP_HELPER_CC(cebr);
3023 break;
3024 case 0xa: /* AEBR R1,R2 [RRE] */
3025 FP_HELPER_CC(aebr);
3026 break;
3027 case 0xb: /* SEBR R1,R2 [RRE] */
3028 FP_HELPER_CC(sebr);
3029 break;
3030 case 0xd: /* DEBR R1,R2 [RRE] */
3031 FP_HELPER(debr);
3032 break;
3033 case 0x10: /* LPDBR R1,R2 [RRE] */
3034 FP_HELPER_CC(lpdbr);
3035 break;
3036 case 0x12: /* LTDBR R1,R2 [RRE] */
3037 FP_HELPER_CC(ltdbr);
3038 break;
3039 case 0x13: /* LCDBR R1,R2 [RRE] */
3040 FP_HELPER_CC(lcdbr);
3041 break;
3042 case 0x15: /* SQDBR R1,R2 [RRE] */
3043 FP_HELPER(sqdbr);
3044 break;
3045 case 0x17: /* MEEBR R1,R2 [RRE] */
3046 FP_HELPER(meebr);
3047 break;
3048 case 0x19: /* CDBR R1,R2 [RRE] */
3049 FP_HELPER_CC(cdbr);
3050 break;
3051 case 0x1a: /* ADBR R1,R2 [RRE] */
3052 FP_HELPER_CC(adbr);
3053 break;
3054 case 0x1b: /* SDBR R1,R2 [RRE] */
3055 FP_HELPER_CC(sdbr);
3056 break;
3057 case 0x1c: /* MDBR R1,R2 [RRE] */
3058 FP_HELPER(mdbr);
3059 break;
3060 case 0x1d: /* DDBR R1,R2 [RRE] */
3061 FP_HELPER(ddbr);
3062 break;
3063 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3064 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3065 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3066 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3067 tmp32_1 = tcg_const_i32(m3);
3068 tmp32_2 = tcg_const_i32(r2);
3069 tmp32_3 = tcg_const_i32(r1);
3070 switch (op) {
3071 case 0xe:
3072 gen_helper_maebr(tmp32_1, tmp32_3, tmp32_2);
3073 break;
3074 case 0x1e:
3075 gen_helper_madbr(tmp32_1, tmp32_3, tmp32_2);
3076 break;
3077 case 0x1f:
3078 gen_helper_msdbr(tmp32_1, tmp32_3, tmp32_2);
3079 break;
3080 default:
3081 tcg_abort();
3083 tcg_temp_free_i32(tmp32_1);
3084 tcg_temp_free_i32(tmp32_2);
3085 tcg_temp_free_i32(tmp32_3);
3086 break;
3087 case 0x40: /* LPXBR R1,R2 [RRE] */
3088 FP_HELPER_CC(lpxbr);
3089 break;
3090 case 0x42: /* LTXBR R1,R2 [RRE] */
3091 FP_HELPER_CC(ltxbr);
3092 break;
3093 case 0x43: /* LCXBR R1,R2 [RRE] */
3094 FP_HELPER_CC(lcxbr);
3095 break;
3096 case 0x44: /* LEDBR R1,R2 [RRE] */
3097 FP_HELPER(ledbr);
3098 break;
3099 case 0x45: /* LDXBR R1,R2 [RRE] */
3100 FP_HELPER(ldxbr);
3101 break;
3102 case 0x46: /* LEXBR R1,R2 [RRE] */
3103 FP_HELPER(lexbr);
3104 break;
3105 case 0x49: /* CXBR R1,R2 [RRE] */
3106 FP_HELPER_CC(cxbr);
3107 break;
3108 case 0x4a: /* AXBR R1,R2 [RRE] */
3109 FP_HELPER_CC(axbr);
3110 break;
3111 case 0x4b: /* SXBR R1,R2 [RRE] */
3112 FP_HELPER_CC(sxbr);
3113 break;
3114 case 0x4c: /* MXBR R1,R2 [RRE] */
3115 FP_HELPER(mxbr);
3116 break;
3117 case 0x4d: /* DXBR R1,R2 [RRE] */
3118 FP_HELPER(dxbr);
3119 break;
3120 case 0x65: /* LXR R1,R2 [RRE] */
3121 tmp = load_freg(r2);
3122 store_freg(r1, tmp);
3123 tcg_temp_free_i64(tmp);
3124 tmp = load_freg(r2 + 2);
3125 store_freg(r1 + 2, tmp);
3126 tcg_temp_free_i64(tmp);
3127 break;
3128 case 0x74: /* LZER R1 [RRE] */
3129 tmp32_1 = tcg_const_i32(r1);
3130 gen_helper_lzer(tmp32_1);
3131 tcg_temp_free_i32(tmp32_1);
3132 break;
3133 case 0x75: /* LZDR R1 [RRE] */
3134 tmp32_1 = tcg_const_i32(r1);
3135 gen_helper_lzdr(tmp32_1);
3136 tcg_temp_free_i32(tmp32_1);
3137 break;
3138 case 0x76: /* LZXR R1 [RRE] */
3139 tmp32_1 = tcg_const_i32(r1);
3140 gen_helper_lzxr(tmp32_1);
3141 tcg_temp_free_i32(tmp32_1);
3142 break;
3143 case 0x84: /* SFPC R1 [RRE] */
3144 tmp32_1 = load_reg32(r1);
3145 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
3146 tcg_temp_free_i32(tmp32_1);
3147 break;
3148 case 0x8c: /* EFPC R1 [RRE] */
3149 tmp32_1 = tcg_temp_new_i32();
3150 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
3151 store_reg32(r1, tmp32_1);
3152 tcg_temp_free_i32(tmp32_1);
3153 break;
3154 case 0x94: /* CEFBR R1,R2 [RRE] */
3155 case 0x95: /* CDFBR R1,R2 [RRE] */
3156 case 0x96: /* CXFBR R1,R2 [RRE] */
3157 tmp32_1 = tcg_const_i32(r1);
3158 tmp32_2 = load_reg32(r2);
3159 switch (op) {
3160 case 0x94:
3161 gen_helper_cefbr(tmp32_1, tmp32_2);
3162 break;
3163 case 0x95:
3164 gen_helper_cdfbr(tmp32_1, tmp32_2);
3165 break;
3166 case 0x96:
3167 gen_helper_cxfbr(tmp32_1, tmp32_2);
3168 break;
3169 default:
3170 tcg_abort();
3172 tcg_temp_free_i32(tmp32_1);
3173 tcg_temp_free_i32(tmp32_2);
3174 break;
3175 case 0x98: /* CFEBR R1,R2 [RRE] */
3176 case 0x99: /* CFDBR R1,R2 [RRE] */
3177 case 0x9a: /* CFXBR R1,R2 [RRE] */
3178 tmp32_1 = tcg_const_i32(r1);
3179 tmp32_2 = tcg_const_i32(r2);
3180 tmp32_3 = tcg_const_i32(m3);
3181 switch (op) {
3182 case 0x98:
3183 gen_helper_cfebr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3184 break;
3185 case 0x99:
3186 gen_helper_cfdbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3187 break;
3188 case 0x9a:
3189 gen_helper_cfxbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3190 break;
3191 default:
3192 tcg_abort();
3194 set_cc_static(s);
3195 tcg_temp_free_i32(tmp32_1);
3196 tcg_temp_free_i32(tmp32_2);
3197 tcg_temp_free_i32(tmp32_3);
3198 break;
3199 case 0xa4: /* CEGBR R1,R2 [RRE] */
3200 case 0xa5: /* CDGBR R1,R2 [RRE] */
3201 tmp32_1 = tcg_const_i32(r1);
3202 tmp = load_reg(r2);
3203 switch (op) {
3204 case 0xa4:
3205 gen_helper_cegbr(tmp32_1, tmp);
3206 break;
3207 case 0xa5:
3208 gen_helper_cdgbr(tmp32_1, tmp);
3209 break;
3210 default:
3211 tcg_abort();
3213 tcg_temp_free_i32(tmp32_1);
3214 tcg_temp_free_i64(tmp);
3215 break;
3216 case 0xa6: /* CXGBR R1,R2 [RRE] */
3217 tmp32_1 = tcg_const_i32(r1);
3218 tmp = load_reg(r2);
3219 gen_helper_cxgbr(tmp32_1, tmp);
3220 tcg_temp_free_i32(tmp32_1);
3221 tcg_temp_free_i64(tmp);
3222 break;
3223 case 0xa8: /* CGEBR R1,R2 [RRE] */
3224 tmp32_1 = tcg_const_i32(r1);
3225 tmp32_2 = tcg_const_i32(r2);
3226 tmp32_3 = tcg_const_i32(m3);
3227 gen_helper_cgebr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3228 set_cc_static(s);
3229 tcg_temp_free_i32(tmp32_1);
3230 tcg_temp_free_i32(tmp32_2);
3231 tcg_temp_free_i32(tmp32_3);
3232 break;
3233 case 0xa9: /* CGDBR R1,R2 [RRE] */
3234 tmp32_1 = tcg_const_i32(r1);
3235 tmp32_2 = tcg_const_i32(r2);
3236 tmp32_3 = tcg_const_i32(m3);
3237 gen_helper_cgdbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3238 set_cc_static(s);
3239 tcg_temp_free_i32(tmp32_1);
3240 tcg_temp_free_i32(tmp32_2);
3241 tcg_temp_free_i32(tmp32_3);
3242 break;
3243 case 0xaa: /* CGXBR R1,R2 [RRE] */
3244 tmp32_1 = tcg_const_i32(r1);
3245 tmp32_2 = tcg_const_i32(r2);
3246 tmp32_3 = tcg_const_i32(m3);
3247 gen_helper_cgxbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3248 set_cc_static(s);
3249 tcg_temp_free_i32(tmp32_1);
3250 tcg_temp_free_i32(tmp32_2);
3251 tcg_temp_free_i32(tmp32_3);
3252 break;
3253 default:
3254 LOG_DISAS("illegal b3 operation 0x%x\n", op);
3255 gen_illegal_opcode(s, 2);
3256 break;
3259 #undef FP_HELPER_CC
3260 #undef FP_HELPER
3263 static void disas_b9(DisasContext *s, int op, int r1, int r2)
3265 TCGv_i64 tmp, tmp2, tmp3;
3266 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3268 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
3269 switch (op) {
3270 case 0x0: /* LPGR R1,R2 [RRE] */
3271 case 0x1: /* LNGR R1,R2 [RRE] */
3272 case 0x2: /* LTGR R1,R2 [RRE] */
3273 case 0x3: /* LCGR R1,R2 [RRE] */
3274 case 0x10: /* LPGFR R1,R2 [RRE] */
3275 case 0x11: /* LNGFR R1,R2 [RRE] */
3276 case 0x12: /* LTGFR R1,R2 [RRE] */
3277 case 0x13: /* LCGFR R1,R2 [RRE] */
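/* the 0x1x variants operate on the 32-bit second operand, the 0x0x
   variants on the full 64-bit register */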
3278 if (op & 0x10) {
3279 tmp = load_reg32_i64(r2);
3280 } else {
3281 tmp = load_reg(r2);
3283 switch (op & 0xf) {
3284 case 0x0: /* LP?GR */
3285 set_cc_abs64(s, tmp);
3286 gen_helper_abs_i64(tmp, tmp);
3287 store_reg(r1, tmp);
3288 break;
3289 case 0x1: /* LN?GR */
3290 set_cc_nabs64(s, tmp);
3291 gen_helper_nabs_i64(tmp, tmp);
3292 store_reg(r1, tmp);
3293 break;
3294 case 0x2: /* LT?GR */
3295 if (r1 != r2) {
3296 store_reg(r1, tmp);
3298 set_cc_s64(s, tmp);
3299 break;
3300 case 0x3: /* LC?GR */
3301 tcg_gen_neg_i64(regs[r1], tmp);
3302 set_cc_comp64(s, regs[r1]);
3303 break;
3305 tcg_temp_free_i64(tmp);
3306 break;
3307 case 0x4: /* LGR R1,R2 [RRE] */
3308 store_reg(r1, regs[r2]);
3309 break;
3310 case 0x6: /* LGBR R1,R2 [RRE] */
3311 tmp2 = load_reg(r2);
3312 tcg_gen_ext8s_i64(tmp2, tmp2);
3313 store_reg(r1, tmp2);
3314 tcg_temp_free_i64(tmp2);
3315 break;
3316 case 0x8: /* AGR R1,R2 [RRE] */
3317 case 0xa: /* ALGR R1,R2 [RRE] */
3318 tmp = load_reg(r1);
3319 tmp2 = load_reg(r2);
3320 tmp3 = tcg_temp_new_i64();
3321 tcg_gen_add_i64(tmp3, tmp, tmp2);
3322 store_reg(r1, tmp3);
3323 switch (op) {
3324 case 0x8:
3325 set_cc_add64(s, tmp, tmp2, tmp3);
3326 break;
3327 case 0xa:
3328 set_cc_addu64(s, tmp, tmp2, tmp3);
3329 break;
3330 default:
3331 tcg_abort();
3333 tcg_temp_free_i64(tmp);
3334 tcg_temp_free_i64(tmp2);
3335 tcg_temp_free_i64(tmp3);
3336 break;
3337 case 0x9: /* SGR R1,R2 [RRE] */
3338 case 0xb: /* SLGR R1,R2 [RRE] */
3339 case 0x1b: /* SLGFR R1,R2 [RRE] */
3340 case 0x19: /* SGFR R1,R2 [RRE] */
3341 tmp = load_reg(r1);
3342 switch (op) {
3343 case 0x1b:
3344 tmp32_1 = load_reg32(r2);
3345 tmp2 = tcg_temp_new_i64();
3346 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3347 tcg_temp_free_i32(tmp32_1);
3348 break;
3349 case 0x19:
3350 tmp32_1 = load_reg32(r2);
3351 tmp2 = tcg_temp_new_i64();
3352 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3353 tcg_temp_free_i32(tmp32_1);
3354 break;
3355 default:
3356 tmp2 = load_reg(r2);
3357 break;
3359 tmp3 = tcg_temp_new_i64();
3360 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3361 store_reg(r1, tmp3);
3362 switch (op) {
3363 case 0x9:
3364 case 0x19:
3365 set_cc_sub64(s, tmp, tmp2, tmp3);
3366 break;
3367 case 0xb:
3368 case 0x1b:
3369 set_cc_subu64(s, tmp, tmp2, tmp3);
3370 break;
3371 default:
3372 tcg_abort();
3374 tcg_temp_free_i64(tmp);
3375 tcg_temp_free_i64(tmp2);
3376 tcg_temp_free_i64(tmp3);
3377 break;
3378 case 0xc: /* MSGR R1,R2 [RRE] */
3379 case 0x1c: /* MSGFR R1,R2 [RRE] */
3380 tmp = load_reg(r1);
3381 tmp2 = load_reg(r2);
3382 if (op == 0x1c) {
3383 tcg_gen_ext32s_i64(tmp2, tmp2);
3385 tcg_gen_mul_i64(tmp, tmp, tmp2);
3386 store_reg(r1, tmp);
3387 tcg_temp_free_i64(tmp);
3388 tcg_temp_free_i64(tmp2);
3389 break;
3390 case 0xd: /* DSGR R1,R2 [RRE] */
3391 case 0x1d: /* DSGFR R1,R2 [RRE] */
3392 tmp = load_reg(r1 + 1);
3393 if (op == 0xd) {
3394 tmp2 = load_reg(r2);
3395 } else {
3396 tmp32_1 = load_reg32(r2);
3397 tmp2 = tcg_temp_new_i64();
3398 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3399 tcg_temp_free_i32(tmp32_1);
3401 tmp3 = tcg_temp_new_i64();
3402 tcg_gen_div_i64(tmp3, tmp, tmp2);
3403 store_reg(r1 + 1, tmp3);
3404 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3405 store_reg(r1, tmp3);
3406 tcg_temp_free_i64(tmp);
3407 tcg_temp_free_i64(tmp2);
3408 tcg_temp_free_i64(tmp3);
3409 break;
3410 case 0x14: /* LGFR R1,R2 [RRE] */
3411 tmp32_1 = load_reg32(r2);
3412 tmp = tcg_temp_new_i64();
3413 tcg_gen_ext_i32_i64(tmp, tmp32_1);
3414 store_reg(r1, tmp);
3415 tcg_temp_free_i32(tmp32_1);
3416 tcg_temp_free_i64(tmp);
3417 break;
3418 case 0x16: /* LLGFR R1,R2 [RRE] */
3419 tmp32_1 = load_reg32(r2);
3420 tmp = tcg_temp_new_i64();
3421 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3422 store_reg(r1, tmp);
3423 tcg_temp_free_i32(tmp32_1);
3424 tcg_temp_free_i64(tmp);
3425 break;
3426 case 0x17: /* LLGTR R1,R2 [RRE] */
3427 tmp32_1 = load_reg32(r2);
3428 tmp = tcg_temp_new_i64();
3429 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
3430 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3431 store_reg(r1, tmp);
3432 tcg_temp_free_i32(tmp32_1);
3433 tcg_temp_free_i64(tmp);
3434 break;
3435 case 0x18: /* AGFR R1,R2 [RRE] */
3436 case 0x1a: /* ALGFR R1,R2 [RRE] */
3437 tmp32_1 = load_reg32(r2);
3438 tmp2 = tcg_temp_new_i64();
3439 if (op == 0x18) {
3440 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3441 } else {
3442 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3444 tcg_temp_free_i32(tmp32_1);
3445 tmp = load_reg(r1);
3446 tmp3 = tcg_temp_new_i64();
3447 tcg_gen_add_i64(tmp3, tmp, tmp2);
3448 store_reg(r1, tmp3);
3449 if (op == 0x18) {
3450 set_cc_add64(s, tmp, tmp2, tmp3);
3451 } else {
3452 set_cc_addu64(s, tmp, tmp2, tmp3);
3454 tcg_temp_free_i64(tmp);
3455 tcg_temp_free_i64(tmp2);
3456 tcg_temp_free_i64(tmp3);
3457 break;
3458 case 0x1f: /* LRVR R1,R2 [RRE] */
3459 tmp32_1 = load_reg32(r2);
3460 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
3461 store_reg32(r1, tmp32_1);
3462 tcg_temp_free_i32(tmp32_1);
3463 break;
3464 case 0x20: /* CGR R1,R2 [RRE] */
3465 case 0x30: /* CGFR R1,R2 [RRE] */
3466 tmp2 = load_reg(r2);
3467 if (op == 0x30) {
3468 tcg_gen_ext32s_i64(tmp2, tmp2);
3470 tmp = load_reg(r1);
3471 cmp_s64(s, tmp, tmp2);
3472 tcg_temp_free_i64(tmp);
3473 tcg_temp_free_i64(tmp2);
3474 break;
3475 case 0x21: /* CLGR R1,R2 [RRE] */
3476 case 0x31: /* CLGFR R1,R2 [RRE] */
3477 tmp2 = load_reg(r2);
3478 if (op == 0x31) {
3479 tcg_gen_ext32u_i64(tmp2, tmp2);
3481 tmp = load_reg(r1);
3482 cmp_u64(s, tmp, tmp2);
3483 tcg_temp_free_i64(tmp);
3484 tcg_temp_free_i64(tmp2);
3485 break;
3486 case 0x26: /* LBR R1,R2 [RRE] */
3487 tmp32_1 = load_reg32(r2);
3488 tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
3489 store_reg32(r1, tmp32_1);
3490 tcg_temp_free_i32(tmp32_1);
3491 break;
3492 case 0x27: /* LHR R1,R2 [RRE] */
3493 tmp32_1 = load_reg32(r2);
3494 tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
3495 store_reg32(r1, tmp32_1);
3496 tcg_temp_free_i32(tmp32_1);
3497 break;
3498 case 0x80: /* NGR R1,R2 [RRE] */
3499 case 0x81: /* OGR R1,R2 [RRE] */
3500 case 0x82: /* XGR R1,R2 [RRE] */
3501 tmp = load_reg(r1);
3502 tmp2 = load_reg(r2);
3503 switch (op) {
3504 case 0x80:
3505 tcg_gen_and_i64(tmp, tmp, tmp2);
3506 break;
3507 case 0x81:
3508 tcg_gen_or_i64(tmp, tmp, tmp2);
3509 break;
3510 case 0x82:
3511 tcg_gen_xor_i64(tmp, tmp, tmp2);
3512 break;
3513 default:
3514 tcg_abort();
3516 store_reg(r1, tmp);
3517 set_cc_nz_u64(s, tmp);
3518 tcg_temp_free_i64(tmp);
3519 tcg_temp_free_i64(tmp2);
3520 break;
3521 case 0x83: /* FLOGR R1,R2 [RRE] */
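/* find leftmost one: the helper does the bit search and returns the cc */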
3522 tmp = load_reg(r2);
3523 tmp32_1 = tcg_const_i32(r1);
3524 gen_helper_flogr(cc_op, tmp32_1, tmp);
3525 set_cc_static(s);
3526 tcg_temp_free_i64(tmp);
3527 tcg_temp_free_i32(tmp32_1);
3528 break;
3529 case 0x84: /* LLGCR R1,R2 [RRE] */
3530 tmp = load_reg(r2);
3531 tcg_gen_andi_i64(tmp, tmp, 0xff);
3532 store_reg(r1, tmp);
3533 tcg_temp_free_i64(tmp);
3534 break;
3535 case 0x85: /* LLGHR R1,R2 [RRE] */
3536 tmp = load_reg(r2);
3537 tcg_gen_andi_i64(tmp, tmp, 0xffff);
3538 store_reg(r1, tmp);
3539 tcg_temp_free_i64(tmp);
3540 break;
3541 case 0x87: /* DLGR R1,R2 [RRE] */
3542 tmp32_1 = tcg_const_i32(r1);
3543 tmp = load_reg(r2);
3544 gen_helper_dlg(tmp32_1, tmp);
3545 tcg_temp_free_i64(tmp);
3546 tcg_temp_free_i32(tmp32_1);
3547 break;
3548 case 0x88: /* ALCGR R1,R2 [RRE] */
3549 tmp = load_reg(r1);
3550 tmp2 = load_reg(r2);
3551 tmp3 = tcg_temp_new_i64();
3552 gen_op_calc_cc(s);
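/* the carry from the previous logical operation is bit 1 of the
   condition code (cc 2 or 3); it is extracted below and added in */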
3553 tcg_gen_extu_i32_i64(tmp3, cc_op);
3554 tcg_gen_shri_i64(tmp3, tmp3, 1);
3555 tcg_gen_andi_i64(tmp3, tmp3, 1);
3556 tcg_gen_add_i64(tmp3, tmp2, tmp3);
3557 tcg_gen_add_i64(tmp3, tmp, tmp3);
3558 store_reg(r1, tmp3);
3559 set_cc_addu64(s, tmp, tmp2, tmp3);
3560 tcg_temp_free_i64(tmp);
3561 tcg_temp_free_i64(tmp2);
3562 tcg_temp_free_i64(tmp3);
3563 break;
3564 case 0x89: /* SLBGR R1,R2 [RRE] */
3565 tmp = load_reg(r1);
3566 tmp2 = load_reg(r2);
3567 tmp32_1 = tcg_const_i32(r1);
3568 gen_op_calc_cc(s);
3569 gen_helper_slbg(cc_op, cc_op, tmp32_1, tmp, tmp2);
3570 set_cc_static(s);
3571 tcg_temp_free_i64(tmp);
3572 tcg_temp_free_i64(tmp2);
3573 tcg_temp_free_i32(tmp32_1);
3574 break;
3575 case 0x94: /* LLCR R1,R2 [RRE] */
3576 tmp32_1 = load_reg32(r2);
3577 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
3578 store_reg32(r1, tmp32_1);
3579 tcg_temp_free_i32(tmp32_1);
3580 break;
3581 case 0x95: /* LLHR R1,R2 [RRE] */
3582 tmp32_1 = load_reg32(r2);
3583 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
3584 store_reg32(r1, tmp32_1);
3585 tcg_temp_free_i32(tmp32_1);
3586 break;
3587 case 0x96: /* MLR R1,R2 [RRE] */
3588 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3589 tmp2 = load_reg(r2);
3590 tmp3 = load_reg((r1 + 1) & 15);
3591 tcg_gen_ext32u_i64(tmp2, tmp2);
3592 tcg_gen_ext32u_i64(tmp3, tmp3);
3593 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3594 store_reg32_i64((r1 + 1) & 15, tmp2);
3595 tcg_gen_shri_i64(tmp2, tmp2, 32);
3596 store_reg32_i64(r1, tmp2);
3597 tcg_temp_free_i64(tmp2);
3598 tcg_temp_free_i64(tmp3);
3599 break;
3600 case 0x97: /* DLR R1,R2 [RRE] */
3601 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3602 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3603 tmp = load_reg(r1);
3604 tmp2 = load_reg(r2);
3605 tmp3 = load_reg((r1 + 1) & 15);
3606 tcg_gen_ext32u_i64(tmp2, tmp2);
3607 tcg_gen_ext32u_i64(tmp3, tmp3);
3608 tcg_gen_shli_i64(tmp, tmp, 32);
3609 tcg_gen_or_i64(tmp, tmp, tmp3);
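/* tmp now holds the unsigned 64-bit dividend formed from the r1/r1+1 pair */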
3611 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3612 tcg_gen_div_i64(tmp, tmp, tmp2);
3613 store_reg32_i64((r1 + 1) & 15, tmp);
3614 store_reg32_i64(r1, tmp3);
3615 tcg_temp_free_i64(tmp);
3616 tcg_temp_free_i64(tmp2);
3617 tcg_temp_free_i64(tmp3);
3618 break;
3619 case 0x98: /* ALCR R1,R2 [RRE] */
3620 tmp32_1 = load_reg32(r1);
3621 tmp32_2 = load_reg32(r2);
3622 tmp32_3 = tcg_temp_new_i32();
3623 /* XXX possible optimization point */
3624 gen_op_calc_cc(s);
3625 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
3626 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3627 store_reg32(r1, tmp32_3);
3628 tcg_temp_free_i32(tmp32_1);
3629 tcg_temp_free_i32(tmp32_2);
3630 tcg_temp_free_i32(tmp32_3);
3631 break;
3632 case 0x99: /* SLBR R1,R2 [RRE] */
3633 tmp32_1 = load_reg32(r2);
3634 tmp32_2 = tcg_const_i32(r1);
3635 gen_op_calc_cc(s);
3636 gen_helper_slb(cc_op, cc_op, tmp32_2, tmp32_1);
3637 set_cc_static(s);
3638 tcg_temp_free_i32(tmp32_1);
3639 tcg_temp_free_i32(tmp32_2);
3640 break;
3641 default:
3642 LOG_DISAS("illegal b9 operation 0x%x\n", op);
3643 gen_illegal_opcode(s, 2);
3644 break;
3648 static void disas_c0(DisasContext *s, int op, int r1, int i2)
3650 TCGv_i64 tmp;
3651 TCGv_i32 tmp32_1, tmp32_2;
3652 uint64_t target = s->pc + i2 * 2LL;
3653 int l1;
3655 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);
3657 switch (op) {
3658 case 0: /* larl r1, i2 */
3659 tmp = tcg_const_i64(target);
3660 store_reg(r1, tmp);
3661 tcg_temp_free_i64(tmp);
3662 break;
3663 case 0x1: /* LGFI R1,I2 [RIL] */
3664 tmp = tcg_const_i64((int64_t)i2);
3665 store_reg(r1, tmp);
3666 tcg_temp_free_i64(tmp);
3667 break;
3668 case 0x4: /* BRCL M1,I2 [RIL] */
3669 /* m1 & (1 << (3 - cc)) */
3670 tmp32_1 = tcg_const_i32(3);
3671 tmp32_2 = tcg_const_i32(1);
3672 gen_op_calc_cc(s);
3673 tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
3674 tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
3675 tcg_temp_free_i32(tmp32_1);
3676 tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
3677 tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
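/* take the branch when the mask bit selected by the current cc is set */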
3678 l1 = gen_new_label();
3679 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
3680 gen_goto_tb(s, 0, target);
3681 gen_set_label(l1);
3682 gen_goto_tb(s, 1, s->pc + 6);
3683 s->is_jmp = DISAS_TB_JUMP;
3684 tcg_temp_free_i32(tmp32_1);
3685 tcg_temp_free_i32(tmp32_2);
3686 break;
3687 case 0x5: /* brasl r1, i2 */
3688 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
3689 store_reg(r1, tmp);
3690 tcg_temp_free_i64(tmp);
3691 gen_goto_tb(s, 0, target);
3692 s->is_jmp = DISAS_TB_JUMP;
3693 break;
3694 case 0x7: /* XILF R1,I2 [RIL] */
3695 case 0xb: /* NILF R1,I2 [RIL] */
3696 case 0xd: /* OILF R1,I2 [RIL] */
3697 tmp32_1 = load_reg32(r1);
3698 switch (op) {
3699 case 0x7:
3700 tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3701 break;
3702 case 0xb:
3703 tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3704 break;
3705 case 0xd:
3706 tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3707 break;
3708 default:
3709 tcg_abort();
3711 store_reg32(r1, tmp32_1);
3712 set_cc_nz_u32(s, tmp32_1);
3713 tcg_temp_free_i32(tmp32_1);
3714 break;
3715 case 0x9: /* IILF R1,I2 [RIL] */
3716 tmp32_1 = tcg_const_i32((uint32_t)i2);
3717 store_reg32(r1, tmp32_1);
3718 tcg_temp_free_i32(tmp32_1);
3719 break;
3720 case 0xa: /* NIHF R1,I2 [RIL] */
3721 tmp = load_reg(r1);
3722 tmp32_1 = tcg_temp_new_i32();
3723 tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
3724 | 0xffffffffULL);
3725 store_reg(r1, tmp);
3726 tcg_gen_shri_i64(tmp, tmp, 32);
3727 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3728 set_cc_nz_u32(s, tmp32_1);
3729 tcg_temp_free_i64(tmp);
3730 tcg_temp_free_i32(tmp32_1);
3731 break;
3732 case 0xe: /* LLIHF R1,I2 [RIL] */
3733 tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
3734 store_reg(r1, tmp);
3735 tcg_temp_free_i64(tmp);
3736 break;
3737 case 0xf: /* LLILF R1,I2 [RIL] */
3738 tmp = tcg_const_i64((uint32_t)i2);
3739 store_reg(r1, tmp);
3740 tcg_temp_free_i64(tmp);
3741 break;
3742 default:
3743 LOG_DISAS("illegal c0 operation 0x%x\n", op);
3744 gen_illegal_opcode(s, 3);
3745 break;
3749 static void disas_c2(DisasContext *s, int op, int r1, int i2)
3751 TCGv_i64 tmp, tmp2, tmp3;
3752 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3754 switch (op) {
3755 case 0x4: /* SLGFI R1,I2 [RIL] */
3756 case 0xa: /* ALGFI R1,I2 [RIL] */
3757 tmp = load_reg(r1);
3758 tmp2 = tcg_const_i64((uint64_t)(uint32_t)i2);
3759 tmp3 = tcg_temp_new_i64();
3760 switch (op) {
3761 case 0x4:
3762 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3763 set_cc_subu64(s, tmp, tmp2, tmp3);
3764 break;
3765 case 0xa:
3766 tcg_gen_add_i64(tmp3, tmp, tmp2);
3767 set_cc_addu64(s, tmp, tmp2, tmp3);
3768 break;
3769 default:
3770 tcg_abort();
3772 store_reg(r1, tmp3);
3773 tcg_temp_free_i64(tmp);
3774 tcg_temp_free_i64(tmp2);
3775 tcg_temp_free_i64(tmp3);
3776 break;
3777 case 0x5: /* SLFI R1,I2 [RIL] */
3778 case 0xb: /* ALFI R1,I2 [RIL] */
3779 tmp32_1 = load_reg32(r1);
3780 tmp32_2 = tcg_const_i32(i2);
3781 tmp32_3 = tcg_temp_new_i32();
3782 switch (op) {
3783 case 0x5:
3784 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
3785 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
3786 break;
3787 case 0xb:
3788 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
3789 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3790 break;
3791 default:
3792 tcg_abort();
3794 store_reg32(r1, tmp32_3);
3795 tcg_temp_free_i32(tmp32_1);
3796 tcg_temp_free_i32(tmp32_2);
3797 tcg_temp_free_i32(tmp32_3);
3798 break;
3799 case 0xc: /* CGFI R1,I2 [RIL] */
3800 tmp = load_reg(r1);
3801 cmp_s64c(s, tmp, (int64_t)i2);
3802 tcg_temp_free_i64(tmp);
3803 break;
3804 case 0xe: /* CLGFI R1,I2 [RIL] */
3805 tmp = load_reg(r1);
3806 cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
3807 tcg_temp_free_i64(tmp);
3808 break;
3809 case 0xd: /* CFI R1,I2 [RIL] */
3810 tmp32_1 = load_reg32(r1);
3811 cmp_s32c(s, tmp32_1, i2);
3812 tcg_temp_free_i32(tmp32_1);
3813 break;
3814 case 0xf: /* CLFI R1,I2 [RIL] */
3815 tmp32_1 = load_reg32(r1);
3816 cmp_u32c(s, tmp32_1, i2);
3817 tcg_temp_free_i32(tmp32_1);
3818 break;
3819 default:
3820 LOG_DISAS("illegal c2 operation 0x%x\n", op);
3821 gen_illegal_opcode(s, 3);
3822 break;
3826 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3828 switch (opc & 0xf) {
3829 case 0x4:
3830 tcg_gen_and_i32(tmp, tmp, tmp2);
3831 break;
3832 case 0x6:
3833 tcg_gen_or_i32(tmp, tmp, tmp2);
3834 break;
3835 case 0x7:
3836 tcg_gen_xor_i32(tmp, tmp, tmp2);
3837 break;
3838 default:
3839 tcg_abort();
3843 static void disas_s390_insn(DisasContext *s)
3845 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3846 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3847 unsigned char opc;
3848 uint64_t insn;
3849 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3850 TCGv_i32 vl;
3851 int ilc;
3852 int l1;
3854 opc = ldub_code(s->pc);
3855 LOG_DISAS("opc 0x%x\n", opc);
3857 ilc = get_ilc(opc);
3859 switch (opc) {
3860 #ifndef CONFIG_USER_ONLY
3861 case 0x01: /* SAM */
3862 insn = ld_code2(s->pc);
3863 /* set addressing mode, but we only do 64-bit anyway */
3864 break;
3865 #endif
3866 case 0x6: /* BCTR R1,R2 [RR] */
3867 insn = ld_code2(s->pc);
3868 decode_rr(s, insn, &r1, &r2);
3869 tmp32_1 = load_reg32(r1);
3870 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3871 store_reg32(r1, tmp32_1);
3873 if (r2) {
3874 gen_update_cc_op(s);
3875 l1 = gen_new_label();
3876 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3878 /* not taking the branch, jump to after the instruction */
3879 gen_goto_tb(s, 0, s->pc + 2);
3880 gen_set_label(l1);
3882 /* take the branch, move R2 into psw.addr */
3883 tmp32_1 = load_reg32(r2);
3884 tmp = tcg_temp_new_i64();
3885 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3886 tcg_gen_mov_i64(psw_addr, tmp);
3887 s->is_jmp = DISAS_JUMP;
3888 tcg_temp_free_i32(tmp32_1);
3889 tcg_temp_free_i64(tmp);
3891 break;
3892 case 0x7: /* BCR M1,R2 [RR] */
3893 insn = ld_code2(s->pc);
3894 decode_rr(s, insn, &r1, &r2);
3895 if (r2) {
3896 tmp = load_reg(r2);
3897 gen_bcr(s, r1, tmp, s->pc);
3898 tcg_temp_free_i64(tmp);
3899 s->is_jmp = DISAS_TB_JUMP;
3900 } else {
3901 /* XXX: "serialization and checkpoint-synchronization function"? */
3903 break;
3904 case 0xa: /* SVC I [RR] */
3905 insn = ld_code2(s->pc);
3906 debug_insn(insn);
3907 i = insn & 0xff;
3908 update_psw_addr(s);
3909 gen_op_calc_cc(s);
3910 tmp32_1 = tcg_const_i32(i);
3911 tmp32_2 = tcg_const_i32(ilc * 2);
3912 tmp32_3 = tcg_const_i32(EXCP_SVC);
3913 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, int_svc_code));
3914 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUState, int_svc_ilc));
3915 gen_helper_exception(tmp32_3);
3916 s->is_jmp = DISAS_EXCP;
3917 tcg_temp_free_i32(tmp32_1);
3918 tcg_temp_free_i32(tmp32_2);
3919 tcg_temp_free_i32(tmp32_3);
3920 break;
3921 case 0xd: /* BASR R1,R2 [RR] */
3922 insn = ld_code2(s->pc);
3923 decode_rr(s, insn, &r1, &r2);
3924 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3925 store_reg(r1, tmp);
3926 if (r2) {
3927 tmp2 = load_reg(r2);
3928 tcg_gen_mov_i64(psw_addr, tmp2);
3929 tcg_temp_free_i64(tmp2);
3930 s->is_jmp = DISAS_JUMP;
3932 tcg_temp_free_i64(tmp);
3933 break;
3934 case 0xe: /* MVCL R1,R2 [RR] */
3935 insn = ld_code2(s->pc);
3936 decode_rr(s, insn, &r1, &r2);
3937 tmp32_1 = tcg_const_i32(r1);
3938 tmp32_2 = tcg_const_i32(r2);
3939 potential_page_fault(s);
3940 gen_helper_mvcl(cc_op, tmp32_1, tmp32_2);
3941 set_cc_static(s);
3942 tcg_temp_free_i32(tmp32_1);
3943 tcg_temp_free_i32(tmp32_2);
3944 break;
3945 case 0x10: /* LPR R1,R2 [RR] */
3946 insn = ld_code2(s->pc);
3947 decode_rr(s, insn, &r1, &r2);
3948 tmp32_1 = load_reg32(r2);
3949 set_cc_abs32(s, tmp32_1);
3950 gen_helper_abs_i32(tmp32_1, tmp32_1);
3951 store_reg32(r1, tmp32_1);
3952 tcg_temp_free_i32(tmp32_1);
3953 break;
3954 case 0x11: /* LNR R1,R2 [RR] */
3955 insn = ld_code2(s->pc);
3956 decode_rr(s, insn, &r1, &r2);
3957 tmp32_1 = load_reg32(r2);
3958 set_cc_nabs32(s, tmp32_1);
3959 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3960 store_reg32(r1, tmp32_1);
3961 tcg_temp_free_i32(tmp32_1);
3962 break;
3963 case 0x12: /* LTR R1,R2 [RR] */
3964 insn = ld_code2(s->pc);
3965 decode_rr(s, insn, &r1, &r2);
3966 tmp32_1 = load_reg32(r2);
3967 if (r1 != r2) {
3968 store_reg32(r1, tmp32_1);
3970 set_cc_s32(s, tmp32_1);
3971 tcg_temp_free_i32(tmp32_1);
3972 break;
3973 case 0x13: /* LCR R1,R2 [RR] */
3974 insn = ld_code2(s->pc);
3975 decode_rr(s, insn, &r1, &r2);
3976 tmp32_1 = load_reg32(r2);
3977 tcg_gen_neg_i32(tmp32_1, tmp32_1);
3978 store_reg32(r1, tmp32_1);
3979 set_cc_comp32(s, tmp32_1);
3980 tcg_temp_free_i32(tmp32_1);
3981 break;
3982 case 0x14: /* NR R1,R2 [RR] */
3983 case 0x16: /* OR R1,R2 [RR] */
3984 case 0x17: /* XR R1,R2 [RR] */
3985 insn = ld_code2(s->pc);
3986 decode_rr(s, insn, &r1, &r2);
3987 tmp32_2 = load_reg32(r2);
3988 tmp32_1 = load_reg32(r1);
3989 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3990 store_reg32(r1, tmp32_1);
3991 set_cc_nz_u32(s, tmp32_1);
3992 tcg_temp_free_i32(tmp32_1);
3993 tcg_temp_free_i32(tmp32_2);
3994 break;
3995 case 0x18: /* LR R1,R2 [RR] */
3996 insn = ld_code2(s->pc);
3997 decode_rr(s, insn, &r1, &r2);
3998 tmp32_1 = load_reg32(r2);
3999 store_reg32(r1, tmp32_1);
4000 tcg_temp_free_i32(tmp32_1);
4001 break;
4002 case 0x15: /* CLR R1,R2 [RR] */
4003 case 0x19: /* CR R1,R2 [RR] */
4004 insn = ld_code2(s->pc);
4005 decode_rr(s, insn, &r1, &r2);
4006 tmp32_1 = load_reg32(r1);
4007 tmp32_2 = load_reg32(r2);
4008 if (opc == 0x15) {
4009 cmp_u32(s, tmp32_1, tmp32_2);
4010 } else {
4011 cmp_s32(s, tmp32_1, tmp32_2);
4013 tcg_temp_free_i32(tmp32_1);
4014 tcg_temp_free_i32(tmp32_2);
4015 break;
4016 case 0x1a: /* AR R1,R2 [RR] */
4017 case 0x1e: /* ALR R1,R2 [RR] */
4018 insn = ld_code2(s->pc);
4019 decode_rr(s, insn, &r1, &r2);
4020 tmp32_1 = load_reg32(r1);
4021 tmp32_2 = load_reg32(r2);
4022 tmp32_3 = tcg_temp_new_i32();
4023 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4024 store_reg32(r1, tmp32_3);
4025 if (opc == 0x1a) {
4026 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4027 } else {
4028 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4030 tcg_temp_free_i32(tmp32_1);
4031 tcg_temp_free_i32(tmp32_2);
4032 tcg_temp_free_i32(tmp32_3);
4033 break;
4034 case 0x1b: /* SR R1,R2 [RR] */
4035 case 0x1f: /* SLR R1,R2 [RR] */
4036 insn = ld_code2(s->pc);
4037 decode_rr(s, insn, &r1, &r2);
4038 tmp32_1 = load_reg32(r1);
4039 tmp32_2 = load_reg32(r2);
4040 tmp32_3 = tcg_temp_new_i32();
4041 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4042 store_reg32(r1, tmp32_3);
4043 if (opc == 0x1b) {
4044 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4045 } else {
4046 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4048 tcg_temp_free_i32(tmp32_1);
4049 tcg_temp_free_i32(tmp32_2);
4050 tcg_temp_free_i32(tmp32_3);
4051 break;
4052 case 0x1c: /* MR R1,R2 [RR] */
4053 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4054 insn = ld_code2(s->pc);
4055 decode_rr(s, insn, &r1, &r2);
4056 tmp2 = load_reg(r2);
4057 tmp3 = load_reg((r1 + 1) & 15);
4058 tcg_gen_ext32s_i64(tmp2, tmp2);
4059 tcg_gen_ext32s_i64(tmp3, tmp3);
4060 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4061 store_reg32_i64((r1 + 1) & 15, tmp2);
4062 tcg_gen_shri_i64(tmp2, tmp2, 32);
4063 store_reg32_i64(r1, tmp2);
4064 tcg_temp_free_i64(tmp2);
4065 tcg_temp_free_i64(tmp3);
4066 break;
4067 case 0x1d: /* DR R1,R2 [RR] */
4068 insn = ld_code2(s->pc);
4069 decode_rr(s, insn, &r1, &r2);
4070 tmp32_1 = load_reg32(r1);
4071 tmp32_2 = load_reg32(r1 + 1);
4072 tmp32_3 = load_reg32(r2);
4074 tmp = tcg_temp_new_i64(); /* dividend */
4075 tmp2 = tcg_temp_new_i64(); /* divisor */
4076 tmp3 = tcg_temp_new_i64();
4078 /* dividend is (r(r1) << 32) | r(r1 + 1) */
4079 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4080 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4081 tcg_gen_shli_i64(tmp, tmp, 32);
4082 tcg_gen_or_i64(tmp, tmp, tmp2);
4084 /* divisor is r(r2) */
4085 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
4087 tcg_gen_div_i64(tmp3, tmp, tmp2);
4088 tcg_gen_rem_i64(tmp, tmp, tmp2);
4090 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4091 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4093 store_reg32(r1, tmp32_1); /* remainder */
4094 store_reg32(r1 + 1, tmp32_2); /* quotient */
4095 tcg_temp_free_i32(tmp32_1);
4096 tcg_temp_free_i32(tmp32_2);
4097 tcg_temp_free_i32(tmp32_3);
4098 tcg_temp_free_i64(tmp);
4099 tcg_temp_free_i64(tmp2);
4100 tcg_temp_free_i64(tmp3);
4101 break;
4102 case 0x28: /* LDR R1,R2 [RR] */
4103 insn = ld_code2(s->pc);
4104 decode_rr(s, insn, &r1, &r2);
4105 tmp = load_freg(r2);
4106 store_freg(r1, tmp);
4107 tcg_temp_free_i64(tmp);
4108 break;
4109 case 0x38: /* LER R1,R2 [RR] */
4110 insn = ld_code2(s->pc);
4111 decode_rr(s, insn, &r1, &r2);
4112 tmp32_1 = load_freg32(r2);
4113 store_freg32(r1, tmp32_1);
4114 tcg_temp_free_i32(tmp32_1);
4115 break;
4116 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4117 insn = ld_code4(s->pc);
4118 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4119 tmp2 = load_reg(r1);
4120 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
4121 tcg_temp_free_i64(tmp);
4122 tcg_temp_free_i64(tmp2);
4123 break;
4124 case 0x41: /* LA R1,D2(X2,B2) [RX] */
4125 insn = ld_code4(s->pc);
4126 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4127 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
4128 tcg_temp_free_i64(tmp);
4129 break;
4130 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4131 insn = ld_code4(s->pc);
4132 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4133 tmp2 = load_reg(r1);
4134 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4135 tcg_temp_free_i64(tmp);
4136 tcg_temp_free_i64(tmp2);
4137 break;
4138 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4139 insn = ld_code4(s->pc);
4140 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4141 tmp2 = tcg_temp_new_i64();
4142 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4143 store_reg8(r1, tmp2);
4144 tcg_temp_free_i64(tmp);
4145 tcg_temp_free_i64(tmp2);
4146 break;
4147 case 0x44: /* EX R1,D2(X2,B2) [RX] */
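/* EXECUTE: the helper runs the instruction at the second-operand address
   (architecturally with its second byte OR'ed with the low byte of r1 when
   r1 is non-zero) and returns the new cc; s->pc + 4 is the address of the
   next instruction. */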
4148 insn = ld_code4(s->pc);
4149 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4150 tmp2 = load_reg(r1);
4151 tmp3 = tcg_const_i64(s->pc + 4);
4152 update_psw_addr(s);
4153 gen_op_calc_cc(s);
4154 gen_helper_ex(cc_op, cc_op, tmp2, tmp, tmp3);
4155 set_cc_static(s);
4156 tcg_temp_free_i64(tmp);
4157 tcg_temp_free_i64(tmp2);
4158 tcg_temp_free_i64(tmp3);
4159 break;
4160 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
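/* BRANCH ON COUNT: decrement r1 and branch to the RX-computed address if
   the result is non-zero, otherwise fall through to s->pc + 4. The address
   is decoded twice; the first decode only extracts r1. */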
4161 insn = ld_code4(s->pc);
4162 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4163 tcg_temp_free_i64(tmp);
4165 tmp32_1 = load_reg32(r1);
4166 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
4167 store_reg32(r1, tmp32_1);
4169 gen_update_cc_op(s);
4170 l1 = gen_new_label();
4171 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
4173 /* not taking the branch, jump to after the instruction */
4174 gen_goto_tb(s, 0, s->pc + 4);
4175 gen_set_label(l1);
4177 /* take the branch, move R2 into psw.addr */
4178 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4179 tcg_gen_mov_i64(psw_addr, tmp);
4180 s->is_jmp = DISAS_JUMP;
4181 tcg_temp_free_i32(tmp32_1);
4182 tcg_temp_free_i64(tmp);
4183 break;
4184 case 0x47: /* BC M1,D2(X2,B2) [RX] */
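/* BRANCH ON CONDITION: r1 holds the mask M1 here; gen_bcr emits the cc
   test and either branches to the computed address or falls through to
   s->pc + 4. */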
4185 insn = ld_code4(s->pc);
4186 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4187 gen_bcr(s, r1, tmp, s->pc + 4);
4188 tcg_temp_free_i64(tmp);
4189 s->is_jmp = DISAS_TB_JUMP;
4190 break;
4191 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4192 insn = ld_code4(s->pc);
4193 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4194 tmp2 = tcg_temp_new_i64();
4195 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4196 store_reg32_i64(r1, tmp2);
4197 tcg_temp_free_i64(tmp);
4198 tcg_temp_free_i64(tmp2);
4199 break;
4200 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4201 insn = ld_code4(s->pc);
4202 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4203 tmp32_1 = load_reg32(r1);
4204 tmp32_2 = tcg_temp_new_i32();
4205 tmp2 = tcg_temp_new_i64();
4206 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4207 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4208 cmp_s32(s, tmp32_1, tmp32_2);
4209 tcg_temp_free_i32(tmp32_1);
4210 tcg_temp_free_i32(tmp32_2);
4211 tcg_temp_free_i64(tmp);
4212 tcg_temp_free_i64(tmp2);
4213 break;
4214 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4215 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4216 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4217 insn = ld_code4(s->pc);
4218 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4219 tmp2 = tcg_temp_new_i64();
4220 tmp32_1 = load_reg32(r1);
4221 tmp32_2 = tcg_temp_new_i32();
4222 tmp32_3 = tcg_temp_new_i32();
4224 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4225 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4226 switch (opc) {
4227 case 0x4a:
4228 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4229 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4230 break;
4231 case 0x4b:
4232 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4233 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4234 break;
4235 case 0x4c:
4236 tcg_gen_mul_i32(tmp32_3, tmp32_1, tmp32_2);
4237 break;
4238 default:
4239 tcg_abort();
4240 }
4241 store_reg32(r1, tmp32_3);
4243 tcg_temp_free_i32(tmp32_1);
4244 tcg_temp_free_i32(tmp32_2);
4245 tcg_temp_free_i32(tmp32_3);
4246 tcg_temp_free_i64(tmp);
4247 tcg_temp_free_i64(tmp2);
4248 break;
4249 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4250 insn = ld_code4(s->pc);
4251 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4252 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
4253 store_reg(r1, tmp2);
4254 tcg_gen_mov_i64(psw_addr, tmp);
4255 tcg_temp_free_i64(tmp);
4256 tcg_temp_free_i64(tmp2);
4257 s->is_jmp = DISAS_JUMP;
4258 break;
4259 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
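/* CONVERT TO DECIMAL: the helper converts the 32-bit value in r1 to an
   8-byte packed decimal number, which is stored at the operand address. */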
4260 insn = ld_code4(s->pc);
4261 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4262 tmp2 = tcg_temp_new_i64();
4263 tmp32_1 = tcg_temp_new_i32();
4264 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
4265 gen_helper_cvd(tmp2, tmp32_1);
4266 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4267 tcg_temp_free_i64(tmp);
4268 tcg_temp_free_i64(tmp2);
4269 tcg_temp_free_i32(tmp32_1);
4270 break;
4271 case 0x50: /* ST R1,D2(X2,B2) [RX] */
4272 insn = ld_code4(s->pc);
4273 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4274 tmp2 = load_reg(r1);
4275 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4276 tcg_temp_free_i64(tmp);
4277 tcg_temp_free_i64(tmp2);
4278 break;
4279 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4280 insn = ld_code4(s->pc);
4281 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4282 tmp2 = tcg_temp_new_i64();
4283 tmp32_1 = tcg_temp_new_i32();
4284 tmp32_2 = load_reg32(r1);
4285 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4286 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4287 cmp_u32(s, tmp32_2, tmp32_1);
4288 tcg_temp_free_i64(tmp);
4289 tcg_temp_free_i64(tmp2);
4290 tcg_temp_free_i32(tmp32_1);
4291 tcg_temp_free_i32(tmp32_2);
4292 break;
4293 case 0x54: /* N R1,D2(X2,B2) [RX] */
4294 case 0x56: /* O R1,D2(X2,B2) [RX] */
4295 case 0x57: /* X R1,D2(X2,B2) [RX] */
4296 insn = ld_code4(s->pc);
4297 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4298 tmp2 = tcg_temp_new_i64();
4299 tmp32_1 = load_reg32(r1);
4300 tmp32_2 = tcg_temp_new_i32();
4301 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4302 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4303 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4304 store_reg32(r1, tmp32_1);
4305 set_cc_nz_u32(s, tmp32_1);
4306 tcg_temp_free_i64(tmp);
4307 tcg_temp_free_i64(tmp2);
4308 tcg_temp_free_i32(tmp32_1);
4309 tcg_temp_free_i32(tmp32_2);
4310 break;
4311 case 0x58: /* L R1,D2(X2,B2) [RX] */
4312 insn = ld_code4(s->pc);
4313 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4314 tmp2 = tcg_temp_new_i64();
4315 tmp32_1 = tcg_temp_new_i32();
4316 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4317 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4318 store_reg32(r1, tmp32_1);
4319 tcg_temp_free_i64(tmp);
4320 tcg_temp_free_i64(tmp2);
4321 tcg_temp_free_i32(tmp32_1);
4322 break;
4323 case 0x59: /* C R1,D2(X2,B2) [RX] */
4324 insn = ld_code4(s->pc);
4325 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4326 tmp2 = tcg_temp_new_i64();
4327 tmp32_1 = tcg_temp_new_i32();
4328 tmp32_2 = load_reg32(r1);
4329 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4330 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4331 cmp_s32(s, tmp32_2, tmp32_1);
4332 tcg_temp_free_i64(tmp);
4333 tcg_temp_free_i64(tmp2);
4334 tcg_temp_free_i32(tmp32_1);
4335 tcg_temp_free_i32(tmp32_2);
4336 break;
4337 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4338 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4339 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4340 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4341 insn = ld_code4(s->pc);
4342 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4343 tmp32_1 = load_reg32(r1);
4344 tmp32_2 = tcg_temp_new_i32();
4345 tmp32_3 = tcg_temp_new_i32();
4346 tcg_gen_qemu_ld32s(tmp, tmp, get_mem_index(s));
4347 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4348 switch (opc) {
4349 case 0x5a:
4350 case 0x5e:
4351 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4352 break;
4353 case 0x5b:
4354 case 0x5f:
4355 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4356 break;
4357 default:
4358 tcg_abort();
4359 }
4360 store_reg32(r1, tmp32_3);
4361 switch (opc) {
4362 case 0x5a:
4363 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4364 break;
4365 case 0x5e:
4366 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4367 break;
4368 case 0x5b:
4369 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4370 break;
4371 case 0x5f:
4372 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4373 break;
4374 default:
4375 tcg_abort();
4376 }
4377 tcg_temp_free_i64(tmp);
4378 tcg_temp_free_i32(tmp32_1);
4379 tcg_temp_free_i32(tmp32_2);
4380 tcg_temp_free_i32(tmp32_3);
4381 break;
4382 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4383 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4384 insn = ld_code4(s->pc);
4385 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4386 tmp2 = tcg_temp_new_i64();
4387 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4388 tmp3 = load_reg((r1 + 1) & 15);
4389 tcg_gen_ext32s_i64(tmp2, tmp2);
4390 tcg_gen_ext32s_i64(tmp3, tmp3);
4391 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4392 store_reg32_i64((r1 + 1) & 15, tmp2);
4393 tcg_gen_shri_i64(tmp2, tmp2, 32);
4394 store_reg32_i64(r1, tmp2);
4395 tcg_temp_free_i64(tmp);
4396 tcg_temp_free_i64(tmp2);
4397 tcg_temp_free_i64(tmp3);
4398 break;
4399 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4400 insn = ld_code4(s->pc);
4401 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4402 tmp32_1 = load_reg32(r1);
4403 tmp32_2 = load_reg32(r1 + 1);
4405 tmp = tcg_temp_new_i64();
4406 tmp2 = tcg_temp_new_i64();
4408 /* dividend is r(r1) << 32 | r(r1 + 1) */
4409 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4410 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4411 tcg_gen_shli_i64(tmp, tmp, 32);
4412 tcg_gen_or_i64(tmp, tmp, tmp2);
4414 /* divisor is in memory */
4415 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
4417 /* XXX divisor == 0 -> FixP divide exception */
4419 tcg_gen_div_i64(tmp3, tmp, tmp2);
4420 tcg_gen_rem_i64(tmp, tmp, tmp2);
4422 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4423 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4425 store_reg32(r1, tmp32_1); /* remainder */
4426 store_reg32(r1 + 1, tmp32_2); /* quotient */
4427 tcg_temp_free_i32(tmp32_1);
4428 tcg_temp_free_i32(tmp32_2);
4429 tcg_temp_free_i64(tmp);
4430 tcg_temp_free_i64(tmp2);
4431 tcg_temp_free_i64(tmp3);
4432 break;
4433 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4434 insn = ld_code4(s->pc);
4435 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4436 tmp2 = load_freg(r1);
4437 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4438 tcg_temp_free_i64(tmp);
4439 tcg_temp_free_i64(tmp2);
4440 break;
4441 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4442 insn = ld_code4(s->pc);
4443 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4444 tmp2 = tcg_temp_new_i64();
4445 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
4446 store_freg(r1, tmp2);
4447 tcg_temp_free_i64(tmp);
4448 tcg_temp_free_i64(tmp2);
4449 break;
4450 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4451 insn = ld_code4(s->pc);
4452 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4453 tmp2 = tcg_temp_new_i64();
4454 tmp32_1 = load_freg32(r1);
4455 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
4456 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4457 tcg_temp_free_i64(tmp);
4458 tcg_temp_free_i64(tmp2);
4459 tcg_temp_free_i32(tmp32_1);
4460 break;
4461 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4462 insn = ld_code4(s->pc);
4463 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4464 tmp2 = tcg_temp_new_i64();
4465 tmp32_1 = load_reg32(r1);
4466 tmp32_2 = tcg_temp_new_i32();
4467 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4468 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4469 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
4470 store_reg32(r1, tmp32_1);
4471 tcg_temp_free_i64(tmp);
4472 tcg_temp_free_i64(tmp2);
4473 tcg_temp_free_i32(tmp32_1);
4474 tcg_temp_free_i32(tmp32_2);
4475 break;
4476 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4477 insn = ld_code4(s->pc);
4478 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4479 tmp2 = tcg_temp_new_i64();
4480 tmp32_1 = tcg_temp_new_i32();
4481 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4482 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4483 store_freg32(r1, tmp32_1);
4484 tcg_temp_free_i64(tmp);
4485 tcg_temp_free_i64(tmp2);
4486 tcg_temp_free_i32(tmp32_1);
4487 break;
4488 #ifndef CONFIG_USER_ONLY
4489 case 0x80: /* SSM D2(B2) [S] */
4490 /* Set System Mask */
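/* replace the top byte of the PSW mask with the byte read from the
   operand address; the remaining mask bits are preserved */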
4491 check_privileged(s, ilc);
4492 insn = ld_code4(s->pc);
4493 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4494 tmp = get_address(s, 0, b2, d2);
4495 tmp2 = tcg_temp_new_i64();
4496 tmp3 = tcg_temp_new_i64();
4497 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
4498 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4499 tcg_gen_shli_i64(tmp2, tmp2, 56);
4500 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
4501 tcg_temp_free_i64(tmp);
4502 tcg_temp_free_i64(tmp2);
4503 tcg_temp_free_i64(tmp3);
4504 break;
4505 case 0x82: /* LPSW D2(B2) [S] */
4506 /* Load PSW */
4507 check_privileged(s, ilc);
4508 insn = ld_code4(s->pc);
4509 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4510 tmp = get_address(s, 0, b2, d2);
4511 tmp2 = tcg_temp_new_i64();
4512 tmp3 = tcg_temp_new_i64();
4513 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4514 tcg_gen_addi_i64(tmp, tmp, 4);
4515 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
4516 gen_helper_load_psw(tmp2, tmp3);
4517 tcg_temp_free_i64(tmp);
4518 tcg_temp_free_i64(tmp2);
4519 tcg_temp_free_i64(tmp3);
4520 /* we need to keep cc_op intact */
4521 s->is_jmp = DISAS_JUMP;
4522 break;
4523 case 0x83: /* DIAG R1,R3,D2 [RS] */
4524 /* Diagnose call (KVM hypercall) */
4525 check_privileged(s, ilc);
4526 potential_page_fault(s);
4527 insn = ld_code4(s->pc);
4528 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4529 tmp32_1 = tcg_const_i32(insn & 0xfff);
4530 tmp2 = load_reg(2);
4531 tmp3 = load_reg(1);
4532 gen_helper_diag(tmp2, tmp32_1, tmp2, tmp3);
4533 store_reg(2, tmp2);
4534 tcg_temp_free_i32(tmp32_1);
4535 tcg_temp_free_i64(tmp2);
4536 tcg_temp_free_i64(tmp3);
4537 break;
4538 #endif
4539 case 0x88: /* SRL R1,D2(B2) [RS] */
4540 case 0x89: /* SLL R1,D2(B2) [RS] */
4541 case 0x8a: /* SRA R1,D2(B2) [RS] */
4542 insn = ld_code4(s->pc);
4543 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4544 tmp = get_address(s, 0, b2, d2);
4545 tmp32_1 = load_reg32(r1);
4546 tmp32_2 = tcg_temp_new_i32();
4547 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4548 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4549 switch (opc) {
4550 case 0x88:
4551 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4552 break;
4553 case 0x89:
4554 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4555 break;
4556 case 0x8a:
4557 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4558 set_cc_s32(s, tmp32_1);
4559 break;
4560 default:
4561 tcg_abort();
4562 }
4563 store_reg32(r1, tmp32_1);
4564 tcg_temp_free_i64(tmp);
4565 tcg_temp_free_i32(tmp32_1);
4566 tcg_temp_free_i32(tmp32_2);
4567 break;
4568 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4569 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4570 case 0x8e: /* SRDA R1,D2(B2) [RS] */
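/* double-register shifts: r1 (high word) and r1+1 (low word) are
   concatenated into one 64-bit value, shifted, and written back to the
   same pair; only SRDA sets the condition code */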
4571 insn = ld_code4(s->pc);
4572 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4573 tmp = get_address(s, 0, b2, d2); /* shift */
4574 tmp2 = tcg_temp_new_i64();
4575 tmp32_1 = load_reg32(r1);
4576 tmp32_2 = load_reg32(r1 + 1);
4577 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4578 switch (opc) {
4579 case 0x8c:
4580 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4581 break;
4582 case 0x8d:
4583 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4584 break;
4585 case 0x8e:
4586 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4587 set_cc_s64(s, tmp2);
4588 break;
4589 }
4590 tcg_gen_shri_i64(tmp, tmp2, 32);
4591 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4592 store_reg32(r1, tmp32_1);
4593 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4594 store_reg32(r1 + 1, tmp32_2);
4595 break;
4596 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4597 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4598 insn = ld_code4(s->pc);
4599 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4601 tmp = get_address(s, 0, b2, d2);
4602 tmp2 = tcg_temp_new_i64();
4603 tmp3 = tcg_const_i64(4);
4604 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
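/* walk registers r1..r3, wrapping after 15: LM (0x98) replaces only the
   low 32 bits of each register (tmp4 preserves the high half), STM stores
   the low 32 bits; tmp3 is the 4-byte address stride */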
4605 for (i = r1;; i = (i + 1) % 16) {
4606 if (opc == 0x98) {
4607 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4608 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4609 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4610 } else {
4611 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4612 }
4613 if (i == r3) {
4614 break;
4615 }
4616 tcg_gen_add_i64(tmp, tmp, tmp3);
4617 }
4618 tcg_temp_free_i64(tmp2);
4619 tcg_temp_free_i64(tmp3);
4620 tcg_temp_free_i64(tmp4);
4621 break;
4622 case 0x91: /* TM D1(B1),I2 [SI] */
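/* TEST UNDER MASK: load the addressed byte and record it together with
   the immediate mask so the condition code can be computed lazily via
   CC_OP_TM_32 */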
4623 insn = ld_code4(s->pc);
4624 tmp = decode_si(s, insn, &i2, &b1, &d1);
4625 tmp2 = tcg_const_i64(i2);
4626 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4627 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4628 tcg_temp_free_i64(tmp);
4629 tcg_temp_free_i64(tmp2);
4630 break;
4631 case 0x92: /* MVI D1(B1),I2 [SI] */
4632 insn = ld_code4(s->pc);
4633 tmp = decode_si(s, insn, &i2, &b1, &d1);
4634 tmp2 = tcg_const_i64(i2);
4635 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4636 tcg_temp_free_i64(tmp);
4637 tcg_temp_free_i64(tmp2);
4638 break;
4639 case 0x94: /* NI D1(B1),I2 [SI] */
4640 case 0x96: /* OI D1(B1),I2 [SI] */
4641 case 0x97: /* XI D1(B1),I2 [SI] */
4642 insn = ld_code4(s->pc);
4643 tmp = decode_si(s, insn, &i2, &b1, &d1);
4644 tmp2 = tcg_temp_new_i64();
4645 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4646 switch (opc) {
4647 case 0x94:
4648 tcg_gen_andi_i64(tmp2, tmp2, i2);
4649 break;
4650 case 0x96:
4651 tcg_gen_ori_i64(tmp2, tmp2, i2);
4652 break;
4653 case 0x97:
4654 tcg_gen_xori_i64(tmp2, tmp2, i2);
4655 break;
4656 default:
4657 tcg_abort();
4658 }
4659 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4660 set_cc_nz_u64(s, tmp2);
4661 tcg_temp_free_i64(tmp);
4662 tcg_temp_free_i64(tmp2);
4663 break;
4664 case 0x95: /* CLI D1(B1),I2 [SI] */
4665 insn = ld_code4(s->pc);
4666 tmp = decode_si(s, insn, &i2, &b1, &d1);
4667 tmp2 = tcg_temp_new_i64();
4668 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4669 cmp_u64c(s, tmp2, i2);
4670 tcg_temp_free_i64(tmp);
4671 tcg_temp_free_i64(tmp2);
4672 break;
4673 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4674 insn = ld_code4(s->pc);
4675 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4676 tmp = get_address(s, 0, b2, d2);
4677 tmp32_1 = tcg_const_i32(r1);
4678 tmp32_2 = tcg_const_i32(r3);
4679 potential_page_fault(s);
4680 gen_helper_lam(tmp32_1, tmp, tmp32_2);
4681 tcg_temp_free_i64(tmp);
4682 tcg_temp_free_i32(tmp32_1);
4683 tcg_temp_free_i32(tmp32_2);
4684 break;
4685 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4686 insn = ld_code4(s->pc);
4687 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4688 tmp = get_address(s, 0, b2, d2);
4689 tmp32_1 = tcg_const_i32(r1);
4690 tmp32_2 = tcg_const_i32(r3);
4691 potential_page_fault(s);
4692 gen_helper_stam(tmp32_1, tmp, tmp32_2);
4693 tcg_temp_free_i64(tmp);
4694 tcg_temp_free_i32(tmp32_1);
4695 tcg_temp_free_i32(tmp32_2);
4696 break;
4697 case 0xa5:
4698 insn = ld_code4(s->pc);
4699 r1 = (insn >> 20) & 0xf;
4700 op = (insn >> 16) & 0xf;
4701 i2 = insn & 0xffff;
4702 disas_a5(s, op, r1, i2);
4703 break;
4704 case 0xa7:
4705 insn = ld_code4(s->pc);
4706 r1 = (insn >> 20) & 0xf;
4707 op = (insn >> 16) & 0xf;
4708 i2 = (short)insn;
4709 disas_a7(s, op, r1, i2);
4710 break;
4711 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4712 insn = ld_code4(s->pc);
4713 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4714 tmp = get_address(s, 0, b2, d2);
4715 tmp32_1 = tcg_const_i32(r1);
4716 tmp32_2 = tcg_const_i32(r3);
4717 potential_page_fault(s);
4718 gen_helper_mvcle(cc_op, tmp32_1, tmp, tmp32_2);
4719 set_cc_static(s);
4720 tcg_temp_free_i64(tmp);
4721 tcg_temp_free_i32(tmp32_1);
4722 tcg_temp_free_i32(tmp32_2);
4723 break;
4724 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4725 insn = ld_code4(s->pc);
4726 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4727 tmp = get_address(s, 0, b2, d2);
4728 tmp32_1 = tcg_const_i32(r1);
4729 tmp32_2 = tcg_const_i32(r3);
4730 potential_page_fault(s);
4731 gen_helper_clcle(cc_op, tmp32_1, tmp, tmp32_2);
4732 set_cc_static(s);
4733 tcg_temp_free_i64(tmp);
4734 tcg_temp_free_i32(tmp32_1);
4735 tcg_temp_free_i32(tmp32_2);
4736 break;
4737 #ifndef CONFIG_USER_ONLY
4738 case 0xac: /* STNSM D1(B1),I2 [SI] */
4739 case 0xad: /* STOSM D1(B1),I2 [SI] */
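/* store the current system-mask byte (top byte of the PSW mask) at the
   operand address, then AND it with i2 (STNSM) or OR it with i2 (STOSM) */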
4740 check_privileged(s, ilc);
4741 insn = ld_code4(s->pc);
4742 tmp = decode_si(s, insn, &i2, &b1, &d1);
4743 tmp2 = tcg_temp_new_i64();
4744 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4745 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4746 if (opc == 0xac) {
4747 tcg_gen_andi_i64(psw_mask, psw_mask,
4748 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4749 } else {
4750 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4751 }
4752 tcg_temp_free_i64(tmp);
4753 tcg_temp_free_i64(tmp2);
4754 break;
4755 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4756 check_privileged(s, ilc);
4757 insn = ld_code4(s->pc);
4758 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4759 tmp = get_address(s, 0, b2, d2);
4760 tmp2 = load_reg(r3);
4761 tmp32_1 = tcg_const_i32(r1);
4762 potential_page_fault(s);
4763 gen_helper_sigp(cc_op, tmp, tmp32_1, tmp2);
4764 set_cc_static(s);
4765 tcg_temp_free_i64(tmp);
4766 tcg_temp_free_i64(tmp2);
4767 tcg_temp_free_i32(tmp32_1);
4768 break;
4769 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4770 check_privileged(s, ilc);
4771 insn = ld_code4(s->pc);
4772 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4773 tmp32_1 = tcg_const_i32(r1);
4774 potential_page_fault(s);
4775 gen_helper_lra(cc_op, tmp, tmp32_1);
4776 set_cc_static(s);
4777 tcg_temp_free_i64(tmp);
4778 tcg_temp_free_i32(tmp32_1);
4779 break;
4780 #endif
4781 case 0xb2:
4782 insn = ld_code4(s->pc);
4783 op = (insn >> 16) & 0xff;
4784 switch (op) {
4785 case 0x9c: /* STFPC D2(B2) [S] */
4786 d2 = insn & 0xfff;
4787 b2 = (insn >> 12) & 0xf;
4788 tmp32_1 = tcg_temp_new_i32();
4789 tmp = tcg_temp_new_i64();
4790 tmp2 = get_address(s, 0, b2, d2);
4791 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
4792 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4793 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4794 tcg_temp_free_i32(tmp32_1);
4795 tcg_temp_free_i64(tmp);
4796 tcg_temp_free_i64(tmp2);
4797 break;
4798 default:
4799 disas_b2(s, op, insn);
4800 break;
4801 }
4802 break;
4803 case 0xb3:
4804 insn = ld_code4(s->pc);
4805 op = (insn >> 16) & 0xff;
4806 r3 = (insn >> 12) & 0xf; /* aka m3 */
4807 r1 = (insn >> 4) & 0xf;
4808 r2 = insn & 0xf;
4809 disas_b3(s, op, r3, r1, r2);
4810 break;
4811 #ifndef CONFIG_USER_ONLY
4812 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4813 /* Store Control */
4814 check_privileged(s, ilc);
4815 insn = ld_code4(s->pc);
4816 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4817 tmp = get_address(s, 0, b2, d2);
4818 tmp32_1 = tcg_const_i32(r1);
4819 tmp32_2 = tcg_const_i32(r3);
4820 potential_page_fault(s);
4821 gen_helper_stctl(tmp32_1, tmp, tmp32_2);
4822 tcg_temp_free_i64(tmp);
4823 tcg_temp_free_i32(tmp32_1);
4824 tcg_temp_free_i32(tmp32_2);
4825 break;
4826 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4827 /* Load Control */
4828 check_privileged(s, ilc);
4829 insn = ld_code4(s->pc);
4830 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4831 tmp = get_address(s, 0, b2, d2);
4832 tmp32_1 = tcg_const_i32(r1);
4833 tmp32_2 = tcg_const_i32(r3);
4834 potential_page_fault(s);
4835 gen_helper_lctl(tmp32_1, tmp, tmp32_2);
4836 tcg_temp_free_i64(tmp);
4837 tcg_temp_free_i32(tmp32_1);
4838 tcg_temp_free_i32(tmp32_2);
4839 break;
4840 #endif
4841 case 0xb9:
4842 insn = ld_code4(s->pc);
4843 r1 = (insn >> 4) & 0xf;
4844 r2 = insn & 0xf;
4845 op = (insn >> 16) & 0xff;
4846 disas_b9(s, op, r1, r2);
4847 break;
4848 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4849 insn = ld_code4(s->pc);
4850 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4851 tmp = get_address(s, 0, b2, d2);
4852 tmp32_1 = tcg_const_i32(r1);
4853 tmp32_2 = tcg_const_i32(r3);
4854 potential_page_fault(s);
4855 gen_helper_cs(cc_op, tmp32_1, tmp, tmp32_2);
4856 set_cc_static(s);
4857 tcg_temp_free_i64(tmp);
4858 tcg_temp_free_i32(tmp32_1);
4859 tcg_temp_free_i32(tmp32_2);
4860 break;
4861 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4862 insn = ld_code4(s->pc);
4863 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4864 tmp = get_address(s, 0, b2, d2);
4865 tmp32_1 = load_reg32(r1);
4866 tmp32_2 = tcg_const_i32(r3);
4867 potential_page_fault(s);
4868 gen_helper_clm(cc_op, tmp32_1, tmp32_2, tmp);
4869 set_cc_static(s);
4870 tcg_temp_free_i64(tmp);
4871 tcg_temp_free_i32(tmp32_1);
4872 tcg_temp_free_i32(tmp32_2);
4873 break;
4874 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4875 insn = ld_code4(s->pc);
4876 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4877 tmp = get_address(s, 0, b2, d2);
4878 tmp32_1 = load_reg32(r1);
4879 tmp32_2 = tcg_const_i32(r3);
4880 potential_page_fault(s);
4881 gen_helper_stcm(tmp32_1, tmp32_2, tmp);
4882 tcg_temp_free_i64(tmp);
4883 tcg_temp_free_i32(tmp32_1);
4884 tcg_temp_free_i32(tmp32_2);
4885 break;
4886 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4887 insn = ld_code4(s->pc);
4888 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4889 if (r3 == 15) {
4890 /* effectively a 32-bit load */
4891 tmp = get_address(s, 0, b2, d2);
4892 tmp32_1 = tcg_temp_new_i32();
4893 tmp32_2 = tcg_const_i32(r3);
4894 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4895 store_reg32_i64(r1, tmp);
4896 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4897 set_cc_icm(s, tmp32_2, tmp32_1);
4898 tcg_temp_free_i64(tmp);
4899 tcg_temp_free_i32(tmp32_1);
4900 tcg_temp_free_i32(tmp32_2);
4901 } else if (r3) {
4902 uint32_t mask = 0x00ffffffUL;
4903 uint32_t shift = 24;
4904 int m3 = r3;
4905 tmp = get_address(s, 0, b2, d2);
4906 tmp2 = tcg_temp_new_i64();
4907 tmp32_1 = load_reg32(r1);
4908 tmp32_2 = tcg_temp_new_i32();
4909 tmp32_3 = tcg_const_i32(r3);
4910 tmp32_4 = tcg_const_i32(0);
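/* general ICM: scan the mask bits from the most significant byte down;
   each selected byte is loaded, shifted into place and merged into r1,
   while tmp32_4 collects the inserted bits for the condition code */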
4911 while (m3) {
4912 if (m3 & 8) {
4913 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4914 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4915 if (shift) {
4916 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4917 }
4918 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4919 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4920 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4921 tcg_gen_addi_i64(tmp, tmp, 1);
4922 }
4923 m3 = (m3 << 1) & 0xf;
4924 mask = (mask >> 8) | 0xff000000UL;
4925 shift -= 8;
4926 }
4927 store_reg32(r1, tmp32_1);
4928 set_cc_icm(s, tmp32_3, tmp32_4);
4929 tcg_temp_free_i64(tmp);
4930 tcg_temp_free_i64(tmp2);
4931 tcg_temp_free_i32(tmp32_1);
4932 tcg_temp_free_i32(tmp32_2);
4933 tcg_temp_free_i32(tmp32_3);
4934 tcg_temp_free_i32(tmp32_4);
4935 } else {
4936 /* i.e. env->cc = 0 */
4937 gen_op_movi_cc(s, 0);
4938 }
4939 break;
4940 case 0xc0:
4941 case 0xc2:
4942 insn = ld_code6(s->pc);
4943 r1 = (insn >> 36) & 0xf;
4944 op = (insn >> 32) & 0xf;
4945 i2 = (int)insn;
4946 switch (opc) {
4947 case 0xc0:
4948 disas_c0(s, op, r1, i2);
4949 break;
4950 case 0xc2:
4951 disas_c2(s, op, r1, i2);
4952 break;
4953 default:
4954 tcg_abort();
4955 }
4956 break;
4957 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4958 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4959 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4960 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4961 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4962 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4963 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
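/* SS-format storage-to-storage operations: bits 32-39 carry the length
   field (vl below), and the two operands are addressed via (b1,d1) and
   (b2,d2) */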
4964 insn = ld_code6(s->pc);
4965 vl = tcg_const_i32((insn >> 32) & 0xff);
4966 b1 = (insn >> 28) & 0xf;
4967 b2 = (insn >> 12) & 0xf;
4968 d1 = (insn >> 16) & 0xfff;
4969 d2 = insn & 0xfff;
4970 tmp = get_address(s, 0, b1, d1);
4971 tmp2 = get_address(s, 0, b2, d2);
4972 switch (opc) {
4973 case 0xd2:
4974 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
4975 break;
4976 case 0xd4:
4977 potential_page_fault(s);
4978 gen_helper_nc(cc_op, vl, tmp, tmp2);
4979 set_cc_static(s);
4980 break;
4981 case 0xd5:
4982 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
4983 break;
4984 case 0xd6:
4985 potential_page_fault(s);
4986 gen_helper_oc(cc_op, vl, tmp, tmp2);
4987 set_cc_static(s);
4988 break;
4989 case 0xd7:
4990 potential_page_fault(s);
4991 gen_helper_xc(cc_op, vl, tmp, tmp2);
4992 set_cc_static(s);
4993 break;
4994 case 0xdc:
4995 potential_page_fault(s);
4996 gen_helper_tr(vl, tmp, tmp2);
4997 set_cc_static(s);
4998 break;
4999 case 0xf3:
5000 potential_page_fault(s);
5001 gen_helper_unpk(vl, tmp, tmp2);
5002 break;
5003 default:
5004 tcg_abort();
5005 }
5006 tcg_temp_free_i64(tmp);
5007 tcg_temp_free_i64(tmp2);
5008 break;
5009 #ifndef CONFIG_USER_ONLY
5010 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5011 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5012 check_privileged(s, ilc);
5013 potential_page_fault(s);
5014 insn = ld_code6(s->pc);
5015 r1 = (insn >> 36) & 0xf;
5016 r3 = (insn >> 32) & 0xf;
5017 b1 = (insn >> 28) & 0xf;
5018 d1 = (insn >> 16) & 0xfff;
5019 b2 = (insn >> 12) & 0xf;
5020 d2 = insn & 0xfff;
5021 tmp = load_reg(r1);
5022 /* XXX key in r3 */
5023 tmp2 = get_address(s, 0, b1, d1);
5024 tmp3 = get_address(s, 0, b2, d2);
5025 if (opc == 0xda) {
5026 gen_helper_mvcp(cc_op, tmp, tmp2, tmp3);
5027 } else {
5028 gen_helper_mvcs(cc_op, tmp, tmp2, tmp3);
5029 }
5030 set_cc_static(s);
5031 tcg_temp_free_i64(tmp);
5032 tcg_temp_free_i64(tmp2);
5033 tcg_temp_free_i64(tmp3);
5034 break;
5035 #endif
5036 case 0xe3:
5037 insn = ld_code6(s->pc);
5038 debug_insn(insn);
5039 op = insn & 0xff;
5040 r1 = (insn >> 36) & 0xf;
5041 x2 = (insn >> 32) & 0xf;
5042 b2 = (insn >> 28) & 0xf;
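/* RXY format: the 20-bit signed displacement is reassembled from DL
   (bits 16-27) and DH (bits 8-15, the high byte); the shift left/right
   by 12 sign-extends the result */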
5043 d2 = ((int)((((insn >> 16) & 0xfff)
5044 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5045 disas_e3(s, op, r1, x2, b2, d2);
5046 break;
5047 #ifndef CONFIG_USER_ONLY
5048 case 0xe5:
5049 /* Test Protection */
5050 check_privileged(s, ilc);
5051 insn = ld_code6(s->pc);
5052 debug_insn(insn);
5053 disas_e5(s, insn);
5054 break;
5055 #endif
5056 case 0xeb:
5057 insn = ld_code6(s->pc);
5058 debug_insn(insn);
5059 op = insn & 0xff;
5060 r1 = (insn >> 36) & 0xf;
5061 r3 = (insn >> 32) & 0xf;
5062 b2 = (insn >> 28) & 0xf;
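/* RSY format: same 20-bit signed DH/DL displacement reassembly as for
   the 0xe3 (RXY) case above */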
5063 d2 = ((int)((((insn >> 16) & 0xfff)
5064 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5065 disas_eb(s, op, r1, r3, b2, d2);
5066 break;
5067 case 0xed:
5068 insn = ld_code6(s->pc);
5069 debug_insn(insn);
5070 op = insn & 0xff;
5071 r1 = (insn >> 36) & 0xf;
5072 x2 = (insn >> 32) & 0xf;
5073 b2 = (insn >> 28) & 0xf;
5074 d2 = (short)((insn >> 16) & 0xfff);
5075 r1b = (insn >> 12) & 0xf;
5076 disas_ed(s, op, r1, x2, b2, d2, r1b);
5077 break;
5078 default:
5079 LOG_DISAS("unimplemented opcode 0x%x\n", opc);
5080 gen_illegal_opcode(s, ilc);
5081 break;
5082 }
5084 /* Instruction length is encoded in the opcode */
5085 s->pc += (ilc * 2);
5086 }
5088 static inline void gen_intermediate_code_internal(CPUState *env,
5089 TranslationBlock *tb,
5090 int search_pc)
5091 {
5092 DisasContext dc;
5093 target_ulong pc_start;
5094 uint64_t next_page_start;
5095 uint16_t *gen_opc_end;
5096 int j, lj = -1;
5097 int num_insns, max_insns;
5098 CPUBreakpoint *bp;
5100 pc_start = tb->pc;
5102 /* 31-bit mode */
5103 if (!(tb->flags & FLAG_MASK_64)) {
5104 pc_start &= 0x7fffffff;
5105 }
5107 dc.pc = pc_start;
5108 dc.is_jmp = DISAS_NEXT;
5109 dc.tb = tb;
5110 dc.cc_op = CC_OP_DYNAMIC;
5112 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5114 next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
5116 num_insns = 0;
5117 max_insns = tb->cflags & CF_COUNT_MASK;
5118 if (max_insns == 0) {
5119 max_insns = CF_COUNT_MASK;
5120 }
5122 gen_icount_start();
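/* translate one instruction at a time until a branch ends the TB, the
   opcode buffer fills up, the next page is reached, the icount budget is
   exhausted, or single-stepping forces an early exit */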
5124 do {
5125 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5126 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5127 if (bp->pc == dc.pc) {
5128 gen_debug(&dc);
5129 break;
5130 }
5131 }
5132 }
5133 if (search_pc) {
5134 j = gen_opc_ptr - gen_opc_buf;
5135 if (lj < j) {
5136 lj++;
5137 while (lj < j) {
5138 gen_opc_instr_start[lj++] = 0;
5139 }
5140 }
5141 gen_opc_pc[lj] = dc.pc;
5142 gen_opc_cc_op[lj] = dc.cc_op;
5143 gen_opc_instr_start[lj] = 1;
5144 gen_opc_icount[lj] = num_insns;
5145 }
5146 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
5147 gen_io_start();
5148 }
5149 #if defined(S390X_DEBUG_DISAS_VERBOSE)
5150 LOG_DISAS("pc " TARGET_FMT_lx "\n",
5151 dc.pc);
5152 #endif
5153 disas_s390_insn(&dc);
5155 num_insns++;
5156 if (env->singlestep_enabled) {
5157 gen_debug(&dc);
5158 }
5159 } while (!dc.is_jmp && gen_opc_ptr < gen_opc_end && dc.pc < next_page_start
5160 && num_insns < max_insns && !env->singlestep_enabled
5161 && !singlestep);
5163 if (!dc.is_jmp) {
5164 update_psw_addr(&dc);
5165 }
5167 if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
5168 gen_op_calc_cc(&dc);
5169 } else {
5170 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5171 is in env */
5172 gen_op_set_cc_op(&dc);
5173 }
5175 if (tb->cflags & CF_LAST_IO) {
5176 gen_io_end();
5177 }
5178 /* Generate the return instruction */
5179 if (dc.is_jmp != DISAS_TB_JUMP) {
5180 tcg_gen_exit_tb(0);
5181 }
5182 gen_icount_end(tb, num_insns);
5183 *gen_opc_ptr = INDEX_op_end;
5184 if (search_pc) {
5185 j = gen_opc_ptr - gen_opc_buf;
5186 lj++;
5187 while (lj <= j) {
5188 gen_opc_instr_start[lj++] = 0;
5189 }
5190 } else {
5191 tb->size = dc.pc - pc_start;
5192 tb->icount = num_insns;
5193 }
5194 #if defined(S390X_DEBUG_DISAS)
5195 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
5196 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5197 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5198 log_target_disas(pc_start, dc.pc - pc_start, 1);
5199 qemu_log("\n");
5200 }
5201 #endif
5202 }
5204 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
5205 {
5206 gen_intermediate_code_internal(env, tb, 0);
5207 }
5209 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
5210 {
5211 gen_intermediate_code_internal(env, tb, 1);
5212 }
5214 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5215 {
5216 int cc_op;
5217 env->psw.addr = gen_opc_pc[pc_pos];
5218 cc_op = gen_opc_cc_op[pc_pos];
5219 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
5220 env->cc_op = cc_op;