target-s390x: Add missing tcg_temp_free_i64() in do_mh()
[qemu/cris-port.git] / target-s390x / translate.c
bloba11cb19b87ee33f074e995321f57d64aca63696b
1 /*
2 * S/390 translation
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
26 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
27 /* #define DEBUG_INLINE_BRANCHES */
28 #define S390X_DEBUG_DISAS
29 /* #define S390X_DEBUG_DISAS_VERBOSE */
31 #ifdef S390X_DEBUG_DISAS_VERBOSE
32 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
33 #else
34 # define LOG_DISAS(...) do { } while (0)
35 #endif
37 #include "cpu.h"
38 #include "exec-all.h"
39 #include "disas.h"
40 #include "tcg-op.h"
41 #include "qemu-log.h"
43 /* global register indexes */
44 static TCGv_ptr cpu_env;
46 #include "gen-icount.h"
47 #include "helpers.h"
48 #define GEN_HELPER 1
49 #include "helpers.h"
typedef struct DisasContext DisasContext;

/* Per-translation-block state threaded through the instruction decoders. */
struct DisasContext {
    uint64_t pc;                  /* guest address of the insn being translated */
    int is_jmp;                   /* DISAS_* disposition; non-zero ends the TB */
    enum cc_op cc_op;             /* lazily-tracked condition-code computation */
    struct TranslationBlock *tb;  /* TB under construction (flags, start pc) */
};

/* Additional is_jmp state: TB ended because an exception was raised. */
#define DISAS_EXCP 4
static void gen_op_calc_cc(DisasContext *s);

#ifdef DEBUG_INLINE_BRANCHES
/* Per-cc_op counters: how often a conditional branch could be inlined
   (hit) vs. had to fall back to the helper-computed cc (miss). */
static uint64_t inline_branch_hit[CC_OP_MAX];
static uint64_t inline_branch_miss[CC_OP_MAX];
#endif
/* Trace the raw instruction word (verbose-disas builds only). */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
73 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
75 if (!(s->tb->flags & FLAG_MASK_64)) {
76 if (s->tb->flags & FLAG_MASK_32) {
77 return pc | 0x80000000;
80 return pc;
/* Dump GPRs, FPRs, (sysemu) control registers and the PSW for debugging.
   Four registers per output line. */
void cpu_dump_state(CPUState *env, FILE *f, fprintf_function cpu_fprintf,
                    int flags)
{
    int i;

    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }

    for (i = 0; i < 16; i++) {
        /* print the raw 64-bit image of the FP register */
        cpu_fprintf(f, "F%02d=%016" PRIx64, i, *(uint64_t *)&env->fregs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }

    cpu_fprintf(f, "\n");

#ifndef CONFIG_USER_ONLY
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }
#endif

    cpu_fprintf(f, "\n");

    /* cc_op > 3 means the cc is still symbolic (lazy); print its name,
       otherwise print the concrete cc value */
    if (env->cc_op > 3) {
        cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
                    env->psw.mask, env->psw.addr, cc_name(env->cc_op));
    } else {
        cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
                    env->psw.mask, env->psw.addr, env->cc_op);
    }

#ifdef DEBUG_INLINE_BRANCHES
    for (i = 0; i < CC_OP_MAX; i++) {
        cpu_fprintf(f, "  %15s = %10ld\t%10ld\n", cc_name(i),
                    inline_branch_miss[i], inline_branch_hit[i]);
    }
#endif
}
/* TCG globals mapping the guest PSW and cc machinery onto CPUState. */
static TCGv_i64 psw_addr;
static TCGv_i64 psw_mask;

static TCGv_i32 cc_op;   /* current cc computation kind (enum cc_op) */
static TCGv_i64 cc_src;  /* first operand of the pending cc computation */
static TCGv_i64 cc_dst;  /* second operand / result */
static TCGv_i64 cc_vr;   /* result value for 3-operand cc ops */

/* backing storage for the "r0".."r15" register names below */
static char cpu_reg_names[10*3 + 6*4];
static TCGv_i64 regs[16];

/* saved cc_op per generated opcode, for restore_state_to_opc */
static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
/* One-time TCG setup: register env and the fixed CPUState-backed globals. */
void s390x_translate_init(void)
{
    int i;
    size_t cpu_reg_names_size = sizeof(cpu_reg_names);
    char *p;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    psw_addr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, psw.addr),
                                      "psw_addr");
    psw_mask = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, psw.mask),
                                      "psw_mask");

    cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                   "cc_op");
    cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_src),
                                    "cc_src");
    cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_dst),
                                    "cc_dst");
    cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_vr),
                                   "cc_vr");

    /* carve "r0".."r15" names out of the cpu_reg_names buffer;
       "r0".."r9" need 3 bytes each, "r10".."r15" need 4 */
    p = cpu_reg_names;
    for (i = 0; i < 16; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        regs[i] = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, regs[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
    }
}
/* Return a fresh temp holding the full 64-bit value of GPR 'reg'.
   Caller owns (and must free) the returned temp. */
static inline TCGv_i64 load_reg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, regs[reg]);
    return r;
}

/* Return a fresh temp holding FPR 'reg' as a raw 64-bit image. */
static inline TCGv_i64 load_freg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ld_i64(r, cpu_env, offsetof(CPUState, fregs[reg].d));
    return r;
}

/* Return a fresh temp holding the short (upper 32-bit) part of FPR 'reg'. */
static inline TCGv_i32 load_freg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_ld_i32(r, cpu_env, offsetof(CPUState, fregs[reg].l.upper));
    return r;
}

/* Return a fresh 32-bit temp with the low half of GPR 'reg'. */
static inline TCGv_i32 load_reg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(r, regs[reg]);
    return r;
}

/* Return a fresh 64-bit temp with the low half of GPR 'reg' sign-extended. */
static inline TCGv_i64 load_reg32_i64(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(r, regs[reg]);
    return r;
}
/* Write all 64 bits of 'v' into GPR 'reg'. */
static inline void store_reg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(regs[reg], v);
}

/* Write the 64-bit image 'v' into FPR 'reg'. */
static inline void store_freg(int reg, TCGv_i64 v)
{
    tcg_gen_st_i64(v, cpu_env, offsetof(CPUState, fregs[reg].d));
}

/* Write 'v' into the low 32 bits of GPR 'reg', preserving the high half. */
static inline void store_reg32(int reg, TCGv_i32 v)
{
#if HOST_LONG_BITS == 32
    /* on 32-bit hosts the i64 global is a register pair; write the low word */
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(tmp, v);
    /* 32 bit register writes keep the upper half */
    tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 32);
    tcg_temp_free_i64(tmp);
#endif
}

/* Same as store_reg32, but the source is already an i64. */
static inline void store_reg32_i64(int reg, TCGv_i64 v)
{
    /* 32 bit register writes keep the upper half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), TCGV_LOW(v));
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
#endif
}

/* Write 'v' into the low 16 bits of GPR 'reg', preserving the rest. */
static inline void store_reg16(int reg, TCGv_i32 v)
{
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(tmp, v);
    /* 16 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 16);
    tcg_temp_free_i64(tmp);
}

/* Write 'v' into the low 8 bits of GPR 'reg', preserving the rest. */
static inline void store_reg8(int reg, TCGv_i64 v)
{
    /* 8 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
}

/* Write the 32-bit value 'v' into the short part of FPR 'reg'. */
static inline void store_freg32(int reg, TCGv_i32 v)
{
    tcg_gen_st_i32(v, cpu_env, offsetof(CPUState, fregs[reg].l.upper));
}
/* Synchronize the architectural PSW address with the translation pc. */
static inline void update_psw_addr(DisasContext *s)
{
    /* psw.addr */
    tcg_gen_movi_i64(psw_addr, s->pc);
}

/* Called before any memory access that may fault in sysemu mode:
   make sure psw.addr and the cc are committed so the fault handler
   sees consistent state.  User-mode faults don't need this. */
static inline void potential_page_fault(DisasContext *s)
{
#ifndef CONFIG_USER_ONLY
    update_psw_addr(s);
    gen_op_calc_cc(s);
#endif
}
/* Fetch a 2-byte instruction from guest code at 'pc'. */
static inline uint64_t ld_code2(uint64_t pc)
{
    return (uint64_t)lduw_code(pc);
}

/* Fetch a 4-byte instruction from guest code at 'pc'. */
static inline uint64_t ld_code4(uint64_t pc)
{
    return (uint64_t)ldl_code(pc);
}

/* Fetch a 6-byte instruction: high halfword in bits 47..32,
   remaining 4 bytes in the low 32 bits. */
static inline uint64_t ld_code6(uint64_t pc)
{
    uint64_t opc;
    opc = (uint64_t)lduw_code(pc) << 32;
    opc |= (uint64_t)(uint32_t)ldl_code(pc+2);
    return opc;
}
/* Map the PSW address-space-control bits (cached in tb->flags) to the
   softmmu memory index: 0=primary, 1=secondary, 2=home space. */
static inline int get_mem_index(DisasContext *s)
{
    switch (s->tb->flags & FLAG_MASK_ASC) {
    case PSW_ASC_PRIMARY >> 32:
        return 0;
    case PSW_ASC_SECONDARY >> 32:
        return 1;
    case PSW_ASC_HOME >> 32:
        return 2;
    default:
        /* access-register mode (and anything else) is not implemented */
        tcg_abort();
        break;
    }
}
/* Raise EXCP_DEBUG (breakpoint hit): commit pc and cc, call the
   exception helper, and end the TB. */
static inline void gen_debug(DisasContext *s)
{
    TCGv_i32 tmp = tcg_const_i32(EXCP_DEBUG);
    update_psw_addr(s);
    gen_op_calc_cc(s);
    gen_helper_exception(tmp);
    tcg_temp_free_i32(tmp);
    s->is_jmp = DISAS_EXCP;
}
326 #ifdef CONFIG_USER_ONLY
/* User-mode variant: illegal opcodes simply raise EXCP_SPEC; there is
   no program-interruption machinery to fill in. */
static void gen_illegal_opcode(DisasContext *s, int ilc)
{
    TCGv_i32 tmp = tcg_const_i32(EXCP_SPEC);
    update_psw_addr(s);
    gen_op_calc_cc(s);
    gen_helper_exception(tmp);
    tcg_temp_free_i32(tmp);
    s->is_jmp = DISAS_EXCP;
}
338 #else /* CONFIG_USER_ONLY */
/* Debug aid: re-fetch and print the offending instruction according to
   its length code (ilc halfwords).  Compiled out unless
   DEBUG_ILLEGAL_INSTRUCTIONS is defined. */
static void debug_print_inst(DisasContext *s, int ilc)
{
#ifdef DEBUG_ILLEGAL_INSTRUCTIONS
    uint64_t inst = 0;

    switch (ilc & 3) {
    case 1:
        inst = ld_code2(s->pc);
        break;
    case 2:
        inst = ld_code4(s->pc);
        break;
    case 3:
        inst = ld_code6(s->pc);
        break;
    }

    fprintf(stderr, "Illegal instruction [%d at %016" PRIx64 "]: 0x%016"
            PRIx64 "\n", ilc, s->pc, inst);
#endif
}
/* Raise a program interruption with the given interruption code:
   record code and ilc in env, advance the PSW past the instruction,
   commit the cc, trigger EXCP_PGM and end the TB. */
static void gen_program_exception(DisasContext *s, int ilc, int code)
{
    TCGv_i32 tmp;

    debug_print_inst(s, ilc);

    /* remember what pgm exeption this was */
    tmp = tcg_const_i32(code);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, int_pgm_code));
    tcg_temp_free_i32(tmp);

    tmp = tcg_const_i32(ilc);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, int_pgm_ilc));
    tcg_temp_free_i32(tmp);

    /* advance past instruction */
    s->pc += (ilc * 2);
    update_psw_addr(s);

    /* save off cc */
    gen_op_calc_cc(s);

    /* trigger exception */
    tmp = tcg_const_i32(EXCP_PGM);
    gen_helper_exception(tmp);
    tcg_temp_free_i32(tmp);

    /* end TB here */
    s->is_jmp = DISAS_EXCP;
}
/* Sysemu variant: illegal opcode -> specification program interruption. */
static void gen_illegal_opcode(DisasContext *s, int ilc)
{
    gen_program_exception(s, ilc, PGM_SPECIFICATION);
}

/* Privileged-operation program interruption. */
static void gen_privileged_exception(DisasContext *s, int ilc)
{
    gen_program_exception(s, ilc, PGM_PRIVILEGED);
}
/* If the CPU is in problem state (user mode), raise a privileged-operation
   exception.  Called at the top of privileged-instruction translators. */
static void check_privileged(DisasContext *s, int ilc)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_privileged_exception(s, ilc);
    }
}
411 #endif /* CONFIG_USER_ONLY */
/* Compute an effective address base(b2) + index(x2) + displacement(d2).
   Returns a fresh temp owned by the caller.  In 31-bit mode the result
   is masked to 31 bits. */
static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
{
    TCGv_i64 tmp;

    /* 31-bitify the immediate part; register contents are dealt with below */
    if (!(s->tb->flags & FLAG_MASK_64)) {
        d2 &= 0x7fffffffUL;
    }

    if (x2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[x2]);
        } else {
            tmp = load_reg(x2);
        }
        if (b2) {
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        }
    } else if (b2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        } else {
            tmp = load_reg(b2);
        }
    } else {
        tmp = tcg_const_i64(d2);
    }

    /* 31-bit mode mask if there are values loaded from registers */
    if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
        tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
    }

    return tmp;
}
/* Record a constant cc value (0..3) as the pending cc computation. */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}

/* 1-operand cc: only cc_dst is meaningful; src/vr are discarded so the
   TCG optimizer can drop dead computations. */
static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* 1-operand cc, 32-bit source (zero-extended into cc_dst). */
static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* 2-operand cc: cc_src and cc_dst. */
static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* 2-operand cc, 32-bit sources. */
static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* 3-operand cc: operands plus result value. */
static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst, TCGv_i64 vr)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_mov_i64(cc_vr, vr);
    s->cc_op = op;
}

/* 3-operand cc, 32-bit sources. */
static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst, TCGv_i32 vr)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_extu_i32_i64(cc_vr, vr);
    s->cc_op = op;
}
/* cc from "value is zero / non-zero" (32-bit). */
static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}

/* cc from "value is zero / non-zero" (64-bit). */
static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}

/* Generic 32-bit compare under the given cc operation. */
static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i32(s, cond, v1, v2);
}

/* Generic 64-bit compare under the given cc operation. */
static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i64(s, cond, v1, v2);
}

/* Signed 32-bit compare. */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}

/* Unsigned 32-bit compare. */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}

/* Signed 32-bit compare against an immediate. */
static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
{
    /* XXX optimize for the constant? put it in s? */
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTGT_32);
    tcg_temp_free_i32(tmp);
}

/* Unsigned 32-bit compare against an immediate. */
static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
{
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
    tcg_temp_free_i32(tmp);
}

/* Signed 64-bit compare. */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}

/* Unsigned 64-bit compare. */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}

/* Signed 64-bit compare against an immediate. */
static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_s64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* Unsigned 64-bit compare against an immediate. */
static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_u64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}
/* cc from sign/zero of a 32-bit value. */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}

/* cc from sign/zero of a 64-bit value. */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}

/* cc for signed 64-bit add: operands v1, v2 and result vr. */
static void set_cc_add64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_64, v1, v2, vr);
}

/* cc for unsigned (logical) 64-bit add. */
static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
}

/* cc for signed 64-bit subtract. */
static void set_cc_sub64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_64, v1, v2, vr);
}

/* cc for unsigned (logical) 64-bit subtract. */
static void set_cc_subu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_64, v1, v2, vr);
}

/* cc for 64-bit absolute value. */
static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}

/* cc for 64-bit negated absolute value. */
static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}

/* 32-bit counterparts of the arithmetic cc setters above. */
static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
}

static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
}

static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
}

static void set_cc_subu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUBU_32, v1, v2, vr);
}

static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}

static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}

/* cc for one's-complement (logical NOT) results. */
static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}

static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}

/* cc for INSERT CHARACTERS UNDER MASK. */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}

/* cc for a 32-bit float compared against a 64-bit image. */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}

/* cc from zero/sign of a 32-bit float result. */
static void set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}

/* cc from zero/sign of a 64-bit float result. */
static inline void set_cc_nz_f64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ_F64, v1);
}
/* CC value is in env->cc_op */
static inline void set_cc_static(DisasContext *s)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}

/* Write the symbolic cc_op into env, unless it's already dynamic/static
   (in which case env->cc_op is authoritative). */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}

/* Commit lazy cc state before leaving the TB. */
static inline void gen_update_cc_op(DisasContext *s)
{
    gen_op_set_cc_op(s);
}
707 /* calculates cc into cc_op */
708 static void gen_op_calc_cc(DisasContext *s)
710 TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
711 TCGv_i64 dummy = tcg_const_i64(0);
713 switch (s->cc_op) {
714 case CC_OP_CONST0:
715 case CC_OP_CONST1:
716 case CC_OP_CONST2:
717 case CC_OP_CONST3:
718 /* s->cc_op is the cc value */
719 tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
720 break;
721 case CC_OP_STATIC:
722 /* env->cc_op already is the cc value */
723 break;
724 case CC_OP_NZ:
725 case CC_OP_ABS_64:
726 case CC_OP_NABS_64:
727 case CC_OP_ABS_32:
728 case CC_OP_NABS_32:
729 case CC_OP_LTGT0_32:
730 case CC_OP_LTGT0_64:
731 case CC_OP_COMP_32:
732 case CC_OP_COMP_64:
733 case CC_OP_NZ_F32:
734 case CC_OP_NZ_F64:
735 /* 1 argument */
736 gen_helper_calc_cc(cc_op, local_cc_op, dummy, cc_dst, dummy);
737 break;
738 case CC_OP_ICM:
739 case CC_OP_LTGT_32:
740 case CC_OP_LTGT_64:
741 case CC_OP_LTUGTU_32:
742 case CC_OP_LTUGTU_64:
743 case CC_OP_TM_32:
744 case CC_OP_TM_64:
745 case CC_OP_LTGT_F32:
746 case CC_OP_LTGT_F64:
747 case CC_OP_SLAG:
748 /* 2 arguments */
749 gen_helper_calc_cc(cc_op, local_cc_op, cc_src, cc_dst, dummy);
750 break;
751 case CC_OP_ADD_64:
752 case CC_OP_ADDU_64:
753 case CC_OP_SUB_64:
754 case CC_OP_SUBU_64:
755 case CC_OP_ADD_32:
756 case CC_OP_ADDU_32:
757 case CC_OP_SUB_32:
758 case CC_OP_SUBU_32:
759 /* 3 arguments */
760 gen_helper_calc_cc(cc_op, local_cc_op, cc_src, cc_dst, cc_vr);
761 break;
762 case CC_OP_DYNAMIC:
763 /* unknown operation - assume 3 arguments and cc_op in env */
764 gen_helper_calc_cc(cc_op, cc_op, cc_src, cc_dst, cc_vr);
765 break;
766 default:
767 tcg_abort();
770 tcg_temp_free_i32(local_cc_op);
772 /* We now have cc in cc_op as constant */
773 set_cc_static(s);
/* Decode an RR-format instruction: r1 in bits 7..4, r2 in bits 3..0. */
static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
{
    debug_insn(insn);

    *r1 = (insn >> 4) & 0xf;
    *r2 = insn & 0xf;
}

/* Decode an RX-format instruction and return the computed effective
   address (caller frees the returned temp). */
static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
                                 int *x2, int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    *x2 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;

    return get_address(s, *x2, *b2, *d2);
}

/* Decode an RS-format instruction (r3 doubles as m3 for some opcodes). */
static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
                             int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    /* aka m3 */
    *r3 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;
}

/* Decode an SI-format instruction and return the effective address
   (no index register; caller frees the returned temp). */
static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
                                 int *b1, int *d1)
{
    debug_insn(insn);

    *i2 = (insn >> 16) & 0xff;
    *b1 = (insn >> 12) & 0xf;
    *d1 = insn & 0xfff;

    return get_address(s, 0, *b1, *d1);
}
/* End the TB with a jump to 'pc': use a direct (chainable) TB link when
   the target lies on one of the pages this TB already spans, otherwise
   fall back to an indirect exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
{
    TranslationBlock *tb;

    gen_update_cc_op(s);

    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb(0);
    }
}
/* Statistics hook: a branch had to go through the calc_cc helper. */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}

/* Statistics hook: a branch was emitted inline from the lazy cc state. */
static inline void account_inline_branch(DisasContext *s)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[s->cc_op]++;
#endif
}
856 static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
858 TCGv_i32 tmp, tmp2, r;
859 TCGv_i64 tmp64;
860 int old_cc_op;
862 switch (s->cc_op) {
863 case CC_OP_LTGT0_32:
864 tmp = tcg_temp_new_i32();
865 tcg_gen_trunc_i64_i32(tmp, cc_dst);
866 switch (mask) {
867 case 0x8 | 0x4: /* dst <= 0 */
868 tcg_gen_brcondi_i32(TCG_COND_GT, tmp, 0, skip);
869 break;
870 case 0x8 | 0x2: /* dst >= 0 */
871 tcg_gen_brcondi_i32(TCG_COND_LT, tmp, 0, skip);
872 break;
873 case 0x8: /* dst == 0 */
874 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
875 break;
876 case 0x7: /* dst != 0 */
877 case 0x6: /* dst != 0 */
878 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
879 break;
880 case 0x4: /* dst < 0 */
881 tcg_gen_brcondi_i32(TCG_COND_GE, tmp, 0, skip);
882 break;
883 case 0x2: /* dst > 0 */
884 tcg_gen_brcondi_i32(TCG_COND_LE, tmp, 0, skip);
885 break;
886 default:
887 tcg_temp_free_i32(tmp);
888 goto do_dynamic;
890 account_inline_branch(s);
891 tcg_temp_free_i32(tmp);
892 break;
893 case CC_OP_LTGT0_64:
894 switch (mask) {
895 case 0x8 | 0x4: /* dst <= 0 */
896 tcg_gen_brcondi_i64(TCG_COND_GT, cc_dst, 0, skip);
897 break;
898 case 0x8 | 0x2: /* dst >= 0 */
899 tcg_gen_brcondi_i64(TCG_COND_LT, cc_dst, 0, skip);
900 break;
901 case 0x8: /* dst == 0 */
902 tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
903 break;
904 case 0x7: /* dst != 0 */
905 case 0x6: /* dst != 0 */
906 tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
907 break;
908 case 0x4: /* dst < 0 */
909 tcg_gen_brcondi_i64(TCG_COND_GE, cc_dst, 0, skip);
910 break;
911 case 0x2: /* dst > 0 */
912 tcg_gen_brcondi_i64(TCG_COND_LE, cc_dst, 0, skip);
913 break;
914 default:
915 goto do_dynamic;
917 account_inline_branch(s);
918 break;
919 case CC_OP_LTGT_32:
920 tmp = tcg_temp_new_i32();
921 tmp2 = tcg_temp_new_i32();
922 tcg_gen_trunc_i64_i32(tmp, cc_src);
923 tcg_gen_trunc_i64_i32(tmp2, cc_dst);
924 switch (mask) {
925 case 0x8 | 0x4: /* src <= dst */
926 tcg_gen_brcond_i32(TCG_COND_GT, tmp, tmp2, skip);
927 break;
928 case 0x8 | 0x2: /* src >= dst */
929 tcg_gen_brcond_i32(TCG_COND_LT, tmp, tmp2, skip);
930 break;
931 case 0x8: /* src == dst */
932 tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
933 break;
934 case 0x7: /* src != dst */
935 case 0x6: /* src != dst */
936 tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
937 break;
938 case 0x4: /* src < dst */
939 tcg_gen_brcond_i32(TCG_COND_GE, tmp, tmp2, skip);
940 break;
941 case 0x2: /* src > dst */
942 tcg_gen_brcond_i32(TCG_COND_LE, tmp, tmp2, skip);
943 break;
944 default:
945 tcg_temp_free_i32(tmp);
946 tcg_temp_free_i32(tmp2);
947 goto do_dynamic;
949 account_inline_branch(s);
950 tcg_temp_free_i32(tmp);
951 tcg_temp_free_i32(tmp2);
952 break;
953 case CC_OP_LTGT_64:
954 switch (mask) {
955 case 0x8 | 0x4: /* src <= dst */
956 tcg_gen_brcond_i64(TCG_COND_GT, cc_src, cc_dst, skip);
957 break;
958 case 0x8 | 0x2: /* src >= dst */
959 tcg_gen_brcond_i64(TCG_COND_LT, cc_src, cc_dst, skip);
960 break;
961 case 0x8: /* src == dst */
962 tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
963 break;
964 case 0x7: /* src != dst */
965 case 0x6: /* src != dst */
966 tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
967 break;
968 case 0x4: /* src < dst */
969 tcg_gen_brcond_i64(TCG_COND_GE, cc_src, cc_dst, skip);
970 break;
971 case 0x2: /* src > dst */
972 tcg_gen_brcond_i64(TCG_COND_LE, cc_src, cc_dst, skip);
973 break;
974 default:
975 goto do_dynamic;
977 account_inline_branch(s);
978 break;
979 case CC_OP_LTUGTU_32:
980 tmp = tcg_temp_new_i32();
981 tmp2 = tcg_temp_new_i32();
982 tcg_gen_trunc_i64_i32(tmp, cc_src);
983 tcg_gen_trunc_i64_i32(tmp2, cc_dst);
984 switch (mask) {
985 case 0x8 | 0x4: /* src <= dst */
986 tcg_gen_brcond_i32(TCG_COND_GTU, tmp, tmp2, skip);
987 break;
988 case 0x8 | 0x2: /* src >= dst */
989 tcg_gen_brcond_i32(TCG_COND_LTU, tmp, tmp2, skip);
990 break;
991 case 0x8: /* src == dst */
992 tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
993 break;
994 case 0x7: /* src != dst */
995 case 0x6: /* src != dst */
996 tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
997 break;
998 case 0x4: /* src < dst */
999 tcg_gen_brcond_i32(TCG_COND_GEU, tmp, tmp2, skip);
1000 break;
1001 case 0x2: /* src > dst */
1002 tcg_gen_brcond_i32(TCG_COND_LEU, tmp, tmp2, skip);
1003 break;
1004 default:
1005 tcg_temp_free_i32(tmp);
1006 tcg_temp_free_i32(tmp2);
1007 goto do_dynamic;
1009 account_inline_branch(s);
1010 tcg_temp_free_i32(tmp);
1011 tcg_temp_free_i32(tmp2);
1012 break;
1013 case CC_OP_LTUGTU_64:
1014 switch (mask) {
1015 case 0x8 | 0x4: /* src <= dst */
1016 tcg_gen_brcond_i64(TCG_COND_GTU, cc_src, cc_dst, skip);
1017 break;
1018 case 0x8 | 0x2: /* src >= dst */
1019 tcg_gen_brcond_i64(TCG_COND_LTU, cc_src, cc_dst, skip);
1020 break;
1021 case 0x8: /* src == dst */
1022 tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
1023 break;
1024 case 0x7: /* src != dst */
1025 case 0x6: /* src != dst */
1026 tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
1027 break;
1028 case 0x4: /* src < dst */
1029 tcg_gen_brcond_i64(TCG_COND_GEU, cc_src, cc_dst, skip);
1030 break;
1031 case 0x2: /* src > dst */
1032 tcg_gen_brcond_i64(TCG_COND_LEU, cc_src, cc_dst, skip);
1033 break;
1034 default:
1035 goto do_dynamic;
1037 account_inline_branch(s);
1038 break;
1039 case CC_OP_NZ:
1040 switch (mask) {
1041 /* dst == 0 || dst != 0 */
1042 case 0x8 | 0x4:
1043 case 0x8 | 0x4 | 0x2:
1044 case 0x8 | 0x4 | 0x2 | 0x1:
1045 case 0x8 | 0x4 | 0x1:
1046 break;
1047 /* dst == 0 */
1048 case 0x8:
1049 case 0x8 | 0x2:
1050 case 0x8 | 0x2 | 0x1:
1051 case 0x8 | 0x1:
1052 tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
1053 break;
1054 /* dst != 0 */
1055 case 0x4:
1056 case 0x4 | 0x2:
1057 case 0x4 | 0x2 | 0x1:
1058 case 0x4 | 0x1:
1059 tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
1060 break;
1061 default:
1062 goto do_dynamic;
1064 account_inline_branch(s);
1065 break;
1066 case CC_OP_TM_32:
1067 tmp = tcg_temp_new_i32();
1068 tmp2 = tcg_temp_new_i32();
1070 tcg_gen_trunc_i64_i32(tmp, cc_src);
1071 tcg_gen_trunc_i64_i32(tmp2, cc_dst);
1072 tcg_gen_and_i32(tmp, tmp, tmp2);
1073 switch (mask) {
1074 case 0x8: /* val & mask == 0 */
1075 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
1076 break;
1077 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1078 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
1079 break;
1080 default:
1081 goto do_dynamic;
1083 tcg_temp_free_i32(tmp);
1084 account_inline_branch(s);
1085 break;
1086 case CC_OP_TM_64:
1087 tmp64 = tcg_temp_new_i64();
1089 tcg_gen_and_i64(tmp64, cc_src, cc_dst);
1090 switch (mask) {
1091 case 0x8: /* val & mask == 0 */
1092 tcg_gen_brcondi_i64(TCG_COND_NE, tmp64, 0, skip);
1093 break;
1094 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1095 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp64, 0, skip);
1096 break;
1097 default:
1098 tcg_temp_free_i64(tmp64);
1099 goto do_dynamic;
1101 tcg_temp_free_i64(tmp64);
1102 account_inline_branch(s);
1103 break;
1104 case CC_OP_ICM:
1105 switch (mask) {
1106 case 0x8: /* val == 0 */
1107 tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
1108 break;
1109 case 0x4 | 0x2 | 0x1: /* val != 0 */
1110 case 0x4 | 0x2: /* val != 0 */
1111 tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
1112 break;
1113 default:
1114 goto do_dynamic;
1116 account_inline_branch(s);
1117 break;
1118 case CC_OP_STATIC:
1119 old_cc_op = s->cc_op;
1120 goto do_dynamic_nocccalc;
1121 case CC_OP_DYNAMIC:
1122 default:
1123 do_dynamic:
1124 old_cc_op = s->cc_op;
1125 /* calculate cc value */
1126 gen_op_calc_cc(s);
1128 do_dynamic_nocccalc:
1129 /* jump based on cc */
1130 account_noninline_branch(s, old_cc_op);
1132 switch (mask) {
1133 case 0x8 | 0x4 | 0x2 | 0x1:
1134 /* always true */
1135 break;
1136 case 0x8 | 0x4 | 0x2: /* cc != 3 */
1137 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 3, skip);
1138 break;
1139 case 0x8 | 0x4 | 0x1: /* cc != 2 */
1140 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 2, skip);
1141 break;
1142 case 0x8 | 0x2 | 0x1: /* cc != 1 */
1143 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 1, skip);
1144 break;
1145 case 0x8 | 0x2: /* cc == 0 || cc == 2 */
1146 tmp = tcg_temp_new_i32();
1147 tcg_gen_andi_i32(tmp, cc_op, 1);
1148 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
1149 tcg_temp_free_i32(tmp);
1150 break;
1151 case 0x8 | 0x4: /* cc < 2 */
1152 tcg_gen_brcondi_i32(TCG_COND_GEU, cc_op, 2, skip);
1153 break;
1154 case 0x8: /* cc == 0 */
1155 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 0, skip);
1156 break;
1157 case 0x4 | 0x2 | 0x1: /* cc != 0 */
1158 tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 0, skip);
1159 break;
1160 case 0x4 | 0x1: /* cc == 1 || cc == 3 */
1161 tmp = tcg_temp_new_i32();
1162 tcg_gen_andi_i32(tmp, cc_op, 1);
1163 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
1164 tcg_temp_free_i32(tmp);
1165 break;
1166 case 0x4: /* cc == 1 */
1167 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 1, skip);
1168 break;
1169 case 0x2 | 0x1: /* cc > 1 */
1170 tcg_gen_brcondi_i32(TCG_COND_LEU, cc_op, 1, skip);
1171 break;
1172 case 0x2: /* cc == 2 */
1173 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 2, skip);
1174 break;
1175 case 0x1: /* cc == 3 */
1176 tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 3, skip);
1177 break;
1178 default: /* cc is masked by something else */
1179 tmp = tcg_const_i32(3);
1180 /* 3 - cc */
1181 tcg_gen_sub_i32(tmp, tmp, cc_op);
1182 tmp2 = tcg_const_i32(1);
1183 /* 1 << (3 - cc) */
1184 tcg_gen_shl_i32(tmp2, tmp2, tmp);
1185 r = tcg_const_i32(mask);
1186 /* mask & (1 << (3 - cc)) */
1187 tcg_gen_and_i32(r, r, tmp2);
1188 tcg_temp_free_i32(tmp);
1189 tcg_temp_free_i32(tmp2);
1191 tcg_gen_brcondi_i32(TCG_COND_EQ, r, 0, skip);
1192 tcg_temp_free_i32(r);
1193 break;
1195 break;
1199 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1200 uint64_t offset)
1202 int skip;
1204 if (mask == 0xf) {
1205 /* unconditional */
1206 tcg_gen_mov_i64(psw_addr, target);
1207 tcg_gen_exit_tb(0);
1208 } else if (mask == 0) {
1209 /* ignore cc and never match */
1210 gen_goto_tb(s, 0, offset + 2);
1211 } else {
1212 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1214 tcg_gen_mov_i64(new_addr, target);
1215 skip = gen_new_label();
1216 gen_jcc(s, mask, skip);
1217 tcg_gen_mov_i64(psw_addr, new_addr);
1218 tcg_temp_free_i64(new_addr);
1219 tcg_gen_exit_tb(0);
1220 gen_set_label(skip);
1221 tcg_temp_free_i64(new_addr);
1222 gen_goto_tb(s, 1, offset + 2);
/* BRANCH RELATIVE ON CONDITION: branch to pc + offset when the cc
   matches 'mask', otherwise fall through to the next (4-byte) insn.
   Always ends the TB. */
static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
{
    int skip;

    if (mask == 0xf) {
        /* unconditional */
        gen_goto_tb(s, 0, s->pc + offset);
    } else if (mask == 0) {
        /* ignore cc and never match */
        gen_goto_tb(s, 0, s->pc + 4);
    } else {
        skip = gen_new_label();
        gen_jcc(s, mask, skip);
        gen_goto_tb(s, 0, s->pc + offset);
        gen_set_label(skip);
        gen_goto_tb(s, 1, s->pc + 4);
    }
    s->is_jmp = DISAS_TB_JUMP;
}
/*
 * MVC - copy l+1 bytes from storage operand s2 to s1.
 *
 * Small, known-good lengths are expanded inline; all other lengths fall
 * back to the mvc helper.  A destination that begins exactly one byte
 * after the source is the classic MVC memset idiom and is handled by a
 * separate expansion that replicates the first byte.
 */
static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
{
    TCGv_i64 tmp, tmp2;
    int i;
    int l_memset = gen_new_label();
    int l_out = gen_new_label();
    /* local temps: they live across the brcond to l_memset below */
    TCGv_i64 dest = tcg_temp_local_new_i64();
    TCGv_i64 src = tcg_temp_local_new_i64();
    TCGv_i32 vl;

    /* Find out if we should use the inline version of mvc */
    switch (l) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 11:
    case 15:
        /* use inline */
        break;
    default:
        /* Fall back to helper */
        vl = tcg_const_i32(l);
        potential_page_fault(s);
        gen_helper_mvc(vl, s1, s2);
        tcg_temp_free_i32(vl);
        return;
    }

    tcg_gen_mov_i64(dest, s1);
    tcg_gen_mov_i64(src, s2);

    if (!(s->tb->flags & FLAG_MASK_64)) {
        /* XXX what if we overflow while moving? */
        /* 31-bit mode: strip the high bit from both addresses */
        tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
        tcg_gen_andi_i64(src, src, 0x7fffffffUL);
    }

    /* dest == src + 1 is the memset idiom - dispatch to the memset path */
    tmp = tcg_temp_new_i64();
    tcg_gen_addi_i64(tmp, src, 1);
    tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
    tcg_temp_free_i64(tmp);

    switch (l) {
    case 0:
        /* 1 byte */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 1:
        /* 2 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 3:
        /* 4 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 4:
        /* 5 bytes: one 32-bit plus one 8-bit access */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
        tcg_gen_addi_i64(src, src, 4);
        tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
        tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 4);
        tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 7:
        /* 8 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    default:
        /* The inline version can become too big for too uneven numbers, only
           use it on known good lengths */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_const_i64(8);
        /* copy 8-byte chunks first */
        for (i = 0; (i + 7) <= l; i += 8) {
            tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
            tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));

            tcg_gen_add_i64(src, src, tmp2);
            tcg_gen_add_i64(dest, dest, tmp2);
        }

        tcg_temp_free_i64(tmp2);
        tmp2 = tcg_const_i64(1);

        /* then the remaining bytes one at a time */
        for (; i <= l; i++) {
            tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
            tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));

            tcg_gen_add_i64(src, src, tmp2);
            tcg_gen_add_i64(dest, dest, tmp2);
        }

        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    }

    tcg_gen_br(l_out);

    gen_set_label(l_memset);
    /* memset case (dest == (src + 1)) */

    tmp = tcg_temp_new_i64();
    tmp2 = tcg_temp_new_i64();
    /* fill tmp with the byte */
    tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
    /* replicate the byte across all 8 byte lanes of tmp */
    tcg_gen_shli_i64(tmp2, tmp, 8);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_gen_shli_i64(tmp2, tmp, 16);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_gen_shli_i64(tmp2, tmp, 32);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_temp_free_i64(tmp2);

    tmp2 = tcg_const_i64(8);

    /* store 8-byte chunks first */
    for (i = 0; (i + 7) <= l; i += 8) {
        tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 8);
    }

    tcg_temp_free_i64(tmp2);
    tmp2 = tcg_const_i64(1);

    /* then the remaining bytes one at a time */
    for (; i <= l; i++) {
        tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 1);
    }

    tcg_temp_free_i64(tmp2);
    tcg_temp_free_i64(tmp);

    gen_set_label(l_out);

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
/*
 * CLC - compare l+1 bytes of storage at s1 with storage at s2, setting
 * the condition code.  Widths that map directly onto a single memory
 * access (1, 2, 4, 8 bytes) are expanded inline as an unsigned compare;
 * every other length goes through the clc helper.
 */
static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
{
    TCGv_i64 tmp;
    TCGv_i64 tmp2;
    TCGv_i32 vl;

    /* check for simple 32bit or 64bit match */
    switch (l) {
    case 0:
        /* 1 byte */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    case 1:
        /* 2 bytes */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    case 3:
        /* 4 bytes */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    case 7:
        /* 8 bytes */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
        tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
        cmp_u64(s, tmp, tmp2);

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        return;
    }

    /* general case: helper sets cc_op directly */
    potential_page_fault(s);
    vl = tcg_const_i32(l);
    gen_helper_clc(cc_op, vl, s1, s2);
    tcg_temp_free_i32(vl);
    set_cc_static(s);
}
/*
 * Decode and emit TCG for the E3 (RXY-format) opcode group: 64-bit and
 * mixed 32/64-bit loads, stores, arithmetic, logic and compares with a
 * base+index+displacement storage operand.
 *
 * op is the low opcode byte, r1 the first-operand register, and x2/b2/d2
 * form the second-operand effective address.
 */
static void disas_e3(DisasContext* s, int op, int r1, int x2, int b2, int d2)
{
    TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
              op, r1, x2, b2, d2);
    addr = get_address(s, x2, b2, d2);
    switch (op) {
    case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
    case 0x4: /* lg r1,d2(x2,b2) */
        tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
        if (op == 0x2) {
            /* LTG additionally sets the condition code from the value */
            set_cc_s64(s, regs[r1]);
        }
        break;
    case 0x12: /* LT R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        store_reg32(r1, tmp32_1);
        set_cc_s32(s, tmp32_1);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
    case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0xc) {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        } else {
            /* MSGF multiplies by a sign-extended 32-bit operand */
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        }
        tcg_gen_mul_i64(regs[r1], regs[r1], tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
    case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x1d) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        /* dividend is in r1+1; quotient goes to r1+1, remainder to r1 */
        tmp4 = load_reg(r1 + 1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_div_i64(tmp3, tmp4, tmp2);
        store_reg(r1 + 1, tmp3);
        tcg_gen_rem_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x8: /* AG R1,D2(X2,B2) [RXY] */
    case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
    case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
    case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
        if (op == 0x1a) {
            /* ALGF: zero-extended 32-bit operand */
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else if (op == 0x18) {
            /* AGF: sign-extended 32-bit operand */
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_add_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x8:
        case 0x18:
            /* signed add cc */
            set_cc_add64(s, tmp4, tmp2, tmp3);
            break;
        case 0xa:
        case 0x1a:
            /* logical (unsigned) add cc */
            set_cc_addu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x9: /* SG R1,D2(X2,B2) [RXY] */
    case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
    case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
    case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x19) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else if (op == 0x1b) {
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_sub_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x9:
        case 0x19:
            /* signed subtract cc */
            set_cc_sub64(s, tmp4, tmp2, tmp3);
            break;
        case 0xb:
        case 0x1b:
            /* logical (unsigned) subtract cc */
            set_cc_subu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
        /* load 64-bit value byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        tcg_gen_bswap64_i64(tmp2, tmp2);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
    case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        if (op == 0x14) {
            /* LGF sign-extends; LLGF keeps the zero extension */
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
        /* load 31-bit value: bit 32 is cleared */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
        /* load 32-bit value byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
        /* load 16-bit value byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
        store_reg16(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x20: /* CG R1,D2(X2,B2) [RXY] */
    case 0x21: /* CLG R1,D2(X2,B2) */
    case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
    case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        /* first pick the operand width/extension ... */
        switch (op) {
        case 0x20:
        case 0x21:
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
            break;
        case 0x30:
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
            break;
        case 0x31:
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
            break;
        default:
            tcg_abort();
        }
        /* ... then signed vs. logical compare */
        switch (op) {
        case 0x20:
        case 0x30:
            cmp_s64(s, regs[r1], tmp2);
            break;
        case 0x21:
        case 0x31:
            cmp_u64(s, regs[r1], tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x24: /* stg r1, d2(x2,b2) */
        tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
        break;
    case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
        /* store 32-bit value byte-reversed */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x50: /* STY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x57: /* XY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
        store_reg32(r1, tmp32_2);
        set_cc_nz_u32(s, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x58: /* LY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
    case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        switch (op) {
        case 0x5a:
            tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        case 0x5b:
            tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_3);
        switch (op) {
        case 0x5a:
            set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x5b:
            set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
        /* load address: store the effective address itself */
        store_reg(r1, addr);
        break;
    case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
        store_reg8(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x76: /* LB R1,D2(X2,B2) [RXY] */
    case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
        switch (op) {
        case 0x76:
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg32_i64(r1, tmp2);
            break;
        case 0x77:
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg(r1, tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x80: /* NG R1,D2(X2,B2) [RXY] */
    case 0x81: /* OG R1,D2(X2,B2) [RXY] */
    case 0x82: /* XG R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
        switch (op) {
        case 0x80:
            tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x81:
            tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x82:
            tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
            break;
        default:
            tcg_abort();
        }
        set_cc_nz_u64(s, regs[r1]);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
        /* 128-bit result: done in the helper */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_mlg(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
        /* 128-bit dividend: done in the helper */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_dlg(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        /* extract the carry (bit 1 of the cc) and add it in */
        tcg_gen_extu_i32_i64(tmp3, cc_op);
        tcg_gen_shri_i64(tmp3, tmp3, 1);
        tcg_gen_andi_i64(tmp3, tmp3, 1);
        tcg_gen_add_i64(tmp3, tmp2, tmp3);
        tcg_gen_add_i64(tmp3, regs[r1], tmp3);
        store_reg(r1, tmp3);
        /* NOTE(review): store_reg above overwrites regs[r1] before
           set_cc_addu64 reads it, so the cc may be computed from the
           result rather than the original first operand - confirm */
        set_cc_addu64(s, regs[r1], tmp2, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slbg(cc_op, cc_op, tmp32_1, regs[r1], tmp2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x96: /* ML R1,D2(X2,B2) [RXY] */
        /* 32x32 -> 64 multiply; high half to r1, low half to r1+1 */
        tmp2 = tcg_temp_new_i64();
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_mul_i64(tmp2, tmp2, tmp3);
        store_reg32_i64((r1 + 1) & 15, tmp2);
        tcg_gen_shri_i64(tmp2, tmp2, 32);
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x97: /* DL R1,D2(X2,B2) [RXY] */
        /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
        /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
        tmp = load_reg(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        /* assemble the 64-bit dividend from the r1:r1+1 pair */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(tmp, tmp, tmp3);

        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        tcg_gen_div_i64(tmp, tmp, tmp2);
        store_reg32_i64((r1 + 1) & 15, tmp);
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
        set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slb(cc_op, cc_op, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal e3 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }
    tcg_temp_free_i64(addr);
}
1942 #ifndef CONFIG_USER_ONLY
/*
 * Decode the E5 (SSE-format) opcode group: two storage operands, each
 * addressed as base+displacement.  Only TPROT is implemented; it is
 * privileged code, hence the CONFIG_USER_ONLY guard around this function.
 */
static void disas_e5(DisasContext* s, uint64_t insn)
{
    TCGv_i64 tmp, tmp2;
    int op = (insn >> 32) & 0xff;

    /* first operand: B1/D1, second operand: B2/D2 */
    tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
    tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);

    LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
    switch (op) {
    case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
        /* Test Protection */
        potential_page_fault(s);
        gen_helper_tprot(cc_op, tmp, tmp2);
        set_cc_static(s);
        break;
    default:
        LOG_DISAS("illegal e5 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }

    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(tmp2);
}
1968 #endif
/*
 * Decode and emit TCG for the EB (mostly RSY/SIY-format) opcode group:
 * 64-bit shifts/rotates, load/store-multiple, compare-and-swap, control
 * register access and storage-immediate operations.
 *
 * r1/r3 are the register operands (or immediate halves for SIY), b2/d2
 * form the storage operand address.
 */
static void disas_eb(DisasContext *s, int op, int r1, int r3, int b2, int d2)
{
    TCGv_i64 tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2;
    int i, stm_len;
    int ilc = 3;

    LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
              op, r1, r3, b2, d2);
    switch (op) {
    case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
    case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
    case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
    case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
    case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
        /* shift count is the low 6 bits of the effective address */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        switch (op) {
        case 0xc:
            tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xd:
            tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xa:
            tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xb:
            tmp2 = tcg_temp_new_i64();
            tmp3 = tcg_temp_new_i64();
            gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
            tcg_gen_shl_i64(tmp2, regs[r3], tmp);
            /* override sign bit with source sign */
            tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
            tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
            tcg_gen_or_i64(regs[r1], tmp2, tmp3);
            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp3);
            break;
        case 0x1c:
            tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
            break;
        default:
            tcg_abort();
            break;
        }
        if (op == 0xa) {
            /* SRAG sets the condition code from the result */
            set_cc_s64(s, regs[r1]);
        }
        tcg_temp_free_i64(tmp);
        break;
    case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        tmp32_1 = tcg_temp_new_i32();
        tmp32_2 = load_reg32(r3);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        switch (op) {
        case 0x1d:
            tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
            break;
        default:
            tcg_abort();
            break;
        }
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
    case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
        stm_len = 8;
        goto do_mh;
    case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
    case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
        stm_len = 4;
    do_mh:
        /* Apparently, unrolling lmg/stmg of any size gains performance -
           even for very long ones... */
        tmp = get_address(s, 0, b2, d2);
        tmp3 = tcg_const_i64(stm_len);
        /* NOTE(review): tmp4 is used as the high-half shift amount; it is
           32 for STMH (0x26) but 4 for LMH (0x96) on 64-bit hosts, which
           looks inconsistent - confirm against the LMH definition */
        tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
        /* walk registers r1..r3 with wraparound at 16 */
        for (i = r1;; i = (i + 1) % 16) {
            switch (op) {
            case 0x4:
                tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x96:
                tmp2 = tcg_temp_new_i64();
#if HOST_LONG_BITS == 32
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
#else
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_shl_i64(tmp2, tmp2, tmp4);
                tcg_gen_ext32u_i64(regs[i], regs[i]);
                tcg_gen_or_i64(regs[i], regs[i], tmp2);
#endif
                tcg_temp_free_i64(tmp2);
                break;
            case 0x24:
                tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x26:
                tmp2 = tcg_temp_new_i64();
                tcg_gen_shr_i64(tmp2, regs[i], tmp4);
                tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
                tcg_temp_free_i64(tmp2);
                break;
            default:
                tcg_abort();
            }
            if (i == r3) {
                break;
            }
            tcg_gen_add_i64(tmp, tmp, tmp3);
        }
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stcmh(tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#ifndef CONFIG_USER_ONLY
    case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
        /* Load Control */
        check_privileged(s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_lctlg(tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
        /* Store Control */
        check_privileged(s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stctg(tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#endif
    case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_csg(cc_op, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_cdsg(cc_op, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x51: /* TMY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
        /* yes, this is a 32 bit operation with 64 bit tcg registers, because
           that incurs less conversions */
        cmp_64(s, tmp, tmp2, CC_OP_TM_32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x52: /* MVIY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x55: /* CLIY D1(B1),I2 [SIY] */
        tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
        tmp = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX split CC calculation out */
        gen_helper_icmh(cc_op, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal eb operation 0x%x\n", op);
        gen_illegal_opcode(s, ilc);
        break;
    }
}
2207 static void disas_ed(DisasContext *s, int op, int r1, int x2, int b2, int d2,
2208 int r1b)
2210 TCGv_i32 tmp_r1, tmp32;
2211 TCGv_i64 addr, tmp;
2212 addr = get_address(s, x2, b2, d2);
2213 tmp_r1 = tcg_const_i32(r1);
2214 switch (op) {
2215 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2216 potential_page_fault(s);
2217 gen_helper_lxdb(tmp_r1, addr);
2218 break;
2219 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2220 tmp = tcg_temp_new_i64();
2221 tmp32 = load_freg32(r1);
2222 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2223 set_cc_cmp_f32_i64(s, tmp32, tmp);
2224 tcg_temp_free_i64(tmp);
2225 tcg_temp_free_i32(tmp32);
2226 break;
2227 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2228 tmp = tcg_temp_new_i64();
2229 tmp32 = tcg_temp_new_i32();
2230 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2231 tcg_gen_trunc_i64_i32(tmp32, tmp);
2232 gen_helper_aeb(tmp_r1, tmp32);
2233 tcg_temp_free_i64(tmp);
2234 tcg_temp_free_i32(tmp32);
2236 tmp32 = load_freg32(r1);
2237 set_cc_nz_f32(s, tmp32);
2238 tcg_temp_free_i32(tmp32);
2239 break;
2240 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2241 tmp = tcg_temp_new_i64();
2242 tmp32 = tcg_temp_new_i32();
2243 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2244 tcg_gen_trunc_i64_i32(tmp32, tmp);
2245 gen_helper_seb(tmp_r1, tmp32);
2246 tcg_temp_free_i64(tmp);
2247 tcg_temp_free_i32(tmp32);
2249 tmp32 = load_freg32(r1);
2250 set_cc_nz_f32(s, tmp32);
2251 tcg_temp_free_i32(tmp32);
2252 break;
2253 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2254 tmp = tcg_temp_new_i64();
2255 tmp32 = tcg_temp_new_i32();
2256 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2257 tcg_gen_trunc_i64_i32(tmp32, tmp);
2258 gen_helper_deb(tmp_r1, tmp32);
2259 tcg_temp_free_i64(tmp);
2260 tcg_temp_free_i32(tmp32);
2261 break;
2262 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2263 potential_page_fault(s);
2264 gen_helper_tceb(cc_op, tmp_r1, addr);
2265 set_cc_static(s);
2266 break;
2267 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2268 potential_page_fault(s);
2269 gen_helper_tcdb(cc_op, tmp_r1, addr);
2270 set_cc_static(s);
2271 break;
2272 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2273 potential_page_fault(s);
2274 gen_helper_tcxb(cc_op, tmp_r1, addr);
2275 set_cc_static(s);
2276 break;
2277 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2278 tmp = tcg_temp_new_i64();
2279 tmp32 = tcg_temp_new_i32();
2280 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2281 tcg_gen_trunc_i64_i32(tmp32, tmp);
2282 gen_helper_meeb(tmp_r1, tmp32);
2283 tcg_temp_free_i64(tmp);
2284 tcg_temp_free_i32(tmp32);
2285 break;
2286 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2287 potential_page_fault(s);
2288 gen_helper_cdb(cc_op, tmp_r1, addr);
2289 set_cc_static(s);
2290 break;
2291 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2292 potential_page_fault(s);
2293 gen_helper_adb(cc_op, tmp_r1, addr);
2294 set_cc_static(s);
2295 break;
2296 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2297 potential_page_fault(s);
2298 gen_helper_sdb(cc_op, tmp_r1, addr);
2299 set_cc_static(s);
2300 break;
2301 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2302 potential_page_fault(s);
2303 gen_helper_mdb(tmp_r1, addr);
2304 break;
2305 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2306 potential_page_fault(s);
2307 gen_helper_ddb(tmp_r1, addr);
2308 break;
2309 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2310 /* for RXF insns, r1 is R3 and r1b is R1 */
2311 tmp32 = tcg_const_i32(r1b);
2312 potential_page_fault(s);
2313 gen_helper_madb(tmp32, addr, tmp_r1);
2314 tcg_temp_free_i32(tmp32);
2315 break;
2316 default:
2317 LOG_DISAS("illegal ed operation 0x%x\n", op);
2318 gen_illegal_opcode(s, 3);
2319 return;
2321 tcg_temp_free_i32(tmp_r1);
2322 tcg_temp_free_i64(addr);
2325 static void disas_a5(DisasContext *s, int op, int r1, int i2)
2327 TCGv_i64 tmp, tmp2;
2328 TCGv_i32 tmp32;
2329 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2330 switch (op) {
2331 case 0x0: /* IIHH R1,I2 [RI] */
2332 tmp = tcg_const_i64(i2);
2333 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
2334 break;
2335 case 0x1: /* IIHL R1,I2 [RI] */
2336 tmp = tcg_const_i64(i2);
2337 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
2338 break;
2339 case 0x2: /* IILH R1,I2 [RI] */
2340 tmp = tcg_const_i64(i2);
2341 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
2342 break;
2343 case 0x3: /* IILL R1,I2 [RI] */
2344 tmp = tcg_const_i64(i2);
2345 tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
2346 break;
2347 case 0x4: /* NIHH R1,I2 [RI] */
2348 case 0x8: /* OIHH R1,I2 [RI] */
2349 tmp = load_reg(r1);
2350 tmp32 = tcg_temp_new_i32();
2351 switch (op) {
2352 case 0x4:
2353 tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
2354 | 0x0000ffffffffffffULL);
2355 tcg_gen_and_i64(tmp, tmp, tmp2);
2356 break;
2357 case 0x8:
2358 tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
2359 tcg_gen_or_i64(tmp, tmp, tmp2);
2360 break;
2361 default:
2362 tcg_abort();
2364 store_reg(r1, tmp);
2365 tcg_gen_shri_i64(tmp2, tmp, 48);
2366 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2367 set_cc_nz_u32(s, tmp32);
2368 tcg_temp_free_i64(tmp2);
2369 tcg_temp_free_i32(tmp32);
2370 break;
2371 case 0x5: /* NIHL R1,I2 [RI] */
2372 case 0x9: /* OIHL R1,I2 [RI] */
2373 tmp = load_reg(r1);
2374 tmp32 = tcg_temp_new_i32();
2375 switch (op) {
2376 case 0x5:
2377 tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
2378 | 0xffff0000ffffffffULL);
2379 tcg_gen_and_i64(tmp, tmp, tmp2);
2380 break;
2381 case 0x9:
2382 tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
2383 tcg_gen_or_i64(tmp, tmp, tmp2);
2384 break;
2385 default:
2386 tcg_abort();
2388 store_reg(r1, tmp);
2389 tcg_gen_shri_i64(tmp2, tmp, 32);
2390 tcg_gen_trunc_i64_i32(tmp32, tmp2);
2391 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2392 set_cc_nz_u32(s, tmp32);
2393 tcg_temp_free_i64(tmp2);
2394 tcg_temp_free_i32(tmp32);
2395 break;
2396 case 0x6: /* NILH R1,I2 [RI] */
2397 case 0xa: /* OILH R1,I2 [RI] */
2398 tmp = load_reg(r1);
2399 tmp32 = tcg_temp_new_i32();
2400 switch (op) {
2401 case 0x6:
2402 tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
2403 | 0xffffffff0000ffffULL);
2404 tcg_gen_and_i64(tmp, tmp, tmp2);
2405 break;
2406 case 0xa:
2407 tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
2408 tcg_gen_or_i64(tmp, tmp, tmp2);
2409 break;
2410 default:
2411 tcg_abort();
2413 store_reg(r1, tmp);
2414 tcg_gen_shri_i64(tmp, tmp, 16);
2415 tcg_gen_trunc_i64_i32(tmp32, tmp);
2416 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2417 set_cc_nz_u32(s, tmp32);
2418 tcg_temp_free_i64(tmp2);
2419 tcg_temp_free_i32(tmp32);
2420 break;
2421 case 0x7: /* NILL R1,I2 [RI] */
2422 case 0xb: /* OILL R1,I2 [RI] */
2423 tmp = load_reg(r1);
2424 tmp32 = tcg_temp_new_i32();
2425 switch (op) {
2426 case 0x7:
2427 tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
2428 tcg_gen_and_i64(tmp, tmp, tmp2);
2429 break;
2430 case 0xb:
2431 tmp2 = tcg_const_i64(i2);
2432 tcg_gen_or_i64(tmp, tmp, tmp2);
2433 break;
2434 default:
2435 tcg_abort();
2437 store_reg(r1, tmp);
2438 tcg_gen_trunc_i64_i32(tmp32, tmp);
2439 tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
2440 set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
2441 tcg_temp_free_i64(tmp2);
2442 tcg_temp_free_i32(tmp32);
2443 break;
2444 case 0xc: /* LLIHH R1,I2 [RI] */
2445 tmp = tcg_const_i64( ((uint64_t)i2) << 48 );
2446 store_reg(r1, tmp);
2447 break;
2448 case 0xd: /* LLIHL R1,I2 [RI] */
2449 tmp = tcg_const_i64( ((uint64_t)i2) << 32 );
2450 store_reg(r1, tmp);
2451 break;
2452 case 0xe: /* LLILH R1,I2 [RI] */
2453 tmp = tcg_const_i64( ((uint64_t)i2) << 16 );
2454 store_reg(r1, tmp);
2455 break;
2456 case 0xf: /* LLILL R1,I2 [RI] */
2457 tmp = tcg_const_i64(i2);
2458 store_reg(r1, tmp);
2459 break;
2460 default:
2461 LOG_DISAS("illegal a5 operation 0x%x\n", op);
2462 gen_illegal_opcode(s, 2);
2463 return;
2465 tcg_temp_free_i64(tmp);
2468 static void disas_a7(DisasContext *s, int op, int r1, int i2)
2470 TCGv_i64 tmp, tmp2;
2471 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2472 int l1;
2474 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
2475 switch (op) {
2476 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2477 case 0x1: /* TMLL or TML R1,I2 [RI] */
2478 case 0x2: /* TMHH R1,I2 [RI] */
2479 case 0x3: /* TMHL R1,I2 [RI] */
2480 tmp = load_reg(r1);
2481 tmp2 = tcg_const_i64((uint16_t)i2);
2482 switch (op) {
2483 case 0x0:
2484 tcg_gen_shri_i64(tmp, tmp, 16);
2485 break;
2486 case 0x1:
2487 break;
2488 case 0x2:
2489 tcg_gen_shri_i64(tmp, tmp, 48);
2490 break;
2491 case 0x3:
2492 tcg_gen_shri_i64(tmp, tmp, 32);
2493 break;
2495 tcg_gen_andi_i64(tmp, tmp, 0xffff);
2496 cmp_64(s, tmp, tmp2, CC_OP_TM_64);
2497 tcg_temp_free_i64(tmp);
2498 tcg_temp_free_i64(tmp2);
2499 break;
2500 case 0x4: /* brc m1, i2 */
2501 gen_brc(r1, s, i2 * 2LL);
2502 return;
2503 case 0x5: /* BRAS R1,I2 [RI] */
2504 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
2505 store_reg(r1, tmp);
2506 tcg_temp_free_i64(tmp);
2507 gen_goto_tb(s, 0, s->pc + i2 * 2LL);
2508 s->is_jmp = DISAS_TB_JUMP;
2509 break;
2510 case 0x6: /* BRCT R1,I2 [RI] */
2511 tmp32_1 = load_reg32(r1);
2512 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
2513 store_reg32(r1, tmp32_1);
2514 gen_update_cc_op(s);
2515 l1 = gen_new_label();
2516 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
2517 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2518 gen_set_label(l1);
2519 gen_goto_tb(s, 1, s->pc + 4);
2520 s->is_jmp = DISAS_TB_JUMP;
2521 tcg_temp_free_i32(tmp32_1);
2522 break;
2523 case 0x7: /* BRCTG R1,I2 [RI] */
2524 tmp = load_reg(r1);
2525 tcg_gen_subi_i64(tmp, tmp, 1);
2526 store_reg(r1, tmp);
2527 gen_update_cc_op(s);
2528 l1 = gen_new_label();
2529 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
2530 gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
2531 gen_set_label(l1);
2532 gen_goto_tb(s, 1, s->pc + 4);
2533 s->is_jmp = DISAS_TB_JUMP;
2534 tcg_temp_free_i64(tmp);
2535 break;
2536 case 0x8: /* lhi r1, i2 */
2537 tmp32_1 = tcg_const_i32(i2);
2538 store_reg32(r1, tmp32_1);
2539 tcg_temp_free_i32(tmp32_1);
2540 break;
2541 case 0x9: /* lghi r1, i2 */
2542 tmp = tcg_const_i64(i2);
2543 store_reg(r1, tmp);
2544 tcg_temp_free_i64(tmp);
2545 break;
2546 case 0xa: /* AHI R1,I2 [RI] */
2547 tmp32_1 = load_reg32(r1);
2548 tmp32_2 = tcg_temp_new_i32();
2549 tmp32_3 = tcg_const_i32(i2);
2551 if (i2 < 0) {
2552 tcg_gen_subi_i32(tmp32_2, tmp32_1, -i2);
2553 } else {
2554 tcg_gen_add_i32(tmp32_2, tmp32_1, tmp32_3);
2557 store_reg32(r1, tmp32_2);
2558 set_cc_add32(s, tmp32_1, tmp32_3, tmp32_2);
2559 tcg_temp_free_i32(tmp32_1);
2560 tcg_temp_free_i32(tmp32_2);
2561 tcg_temp_free_i32(tmp32_3);
2562 break;
2563 case 0xb: /* aghi r1, i2 */
2564 tmp = load_reg(r1);
2565 tmp2 = tcg_const_i64(i2);
2567 if (i2 < 0) {
2568 tcg_gen_subi_i64(regs[r1], tmp, -i2);
2569 } else {
2570 tcg_gen_add_i64(regs[r1], tmp, tmp2);
2572 set_cc_add64(s, tmp, tmp2, regs[r1]);
2573 tcg_temp_free_i64(tmp);
2574 tcg_temp_free_i64(tmp2);
2575 break;
2576 case 0xc: /* MHI R1,I2 [RI] */
2577 tmp32_1 = load_reg32(r1);
2578 tcg_gen_muli_i32(tmp32_1, tmp32_1, i2);
2579 store_reg32(r1, tmp32_1);
2580 tcg_temp_free_i32(tmp32_1);
2581 break;
2582 case 0xd: /* MGHI R1,I2 [RI] */
2583 tmp = load_reg(r1);
2584 tcg_gen_muli_i64(tmp, tmp, i2);
2585 store_reg(r1, tmp);
2586 tcg_temp_free_i64(tmp);
2587 break;
2588 case 0xe: /* CHI R1,I2 [RI] */
2589 tmp32_1 = load_reg32(r1);
2590 cmp_s32c(s, tmp32_1, i2);
2591 tcg_temp_free_i32(tmp32_1);
2592 break;
2593 case 0xf: /* CGHI R1,I2 [RI] */
2594 tmp = load_reg(r1);
2595 cmp_s64c(s, tmp, i2);
2596 tcg_temp_free_i64(tmp);
2597 break;
2598 default:
2599 LOG_DISAS("illegal a7 operation 0x%x\n", op);
2600 gen_illegal_opcode(s, 2);
2601 return;
2605 static void disas_b2(DisasContext *s, int op, uint32_t insn)
2607 TCGv_i64 tmp, tmp2, tmp3;
2608 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2609 int r1, r2;
2610 int ilc = 2;
2611 #ifndef CONFIG_USER_ONLY
2612 int r3, d2, b2;
2613 #endif
2615 r1 = (insn >> 4) & 0xf;
2616 r2 = insn & 0xf;
2618 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);
2620 switch (op) {
2621 case 0x22: /* IPM R1 [RRE] */
2622 tmp32_1 = tcg_const_i32(r1);
2623 gen_op_calc_cc(s);
2624 gen_helper_ipm(cc_op, tmp32_1);
2625 tcg_temp_free_i32(tmp32_1);
2626 break;
2627 case 0x41: /* CKSM R1,R2 [RRE] */
2628 tmp32_1 = tcg_const_i32(r1);
2629 tmp32_2 = tcg_const_i32(r2);
2630 potential_page_fault(s);
2631 gen_helper_cksm(tmp32_1, tmp32_2);
2632 tcg_temp_free_i32(tmp32_1);
2633 tcg_temp_free_i32(tmp32_2);
2634 gen_op_movi_cc(s, 0);
2635 break;
2636 case 0x4e: /* SAR R1,R2 [RRE] */
2637 tmp32_1 = load_reg32(r2);
2638 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, aregs[r1]));
2639 tcg_temp_free_i32(tmp32_1);
2640 break;
2641 case 0x4f: /* EAR R1,R2 [RRE] */
2642 tmp32_1 = tcg_temp_new_i32();
2643 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, aregs[r2]));
2644 store_reg32(r1, tmp32_1);
2645 tcg_temp_free_i32(tmp32_1);
2646 break;
2647 case 0x52: /* MSR R1,R2 [RRE] */
2648 tmp32_1 = load_reg32(r1);
2649 tmp32_2 = load_reg32(r2);
2650 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
2651 store_reg32(r1, tmp32_1);
2652 tcg_temp_free_i32(tmp32_1);
2653 tcg_temp_free_i32(tmp32_2);
2654 break;
2655 case 0x54: /* MVPG R1,R2 [RRE] */
2656 tmp = load_reg(0);
2657 tmp2 = load_reg(r1);
2658 tmp3 = load_reg(r2);
2659 potential_page_fault(s);
2660 gen_helper_mvpg(tmp, tmp2, tmp3);
2661 tcg_temp_free_i64(tmp);
2662 tcg_temp_free_i64(tmp2);
2663 tcg_temp_free_i64(tmp3);
2664 /* XXX check CCO bit and set CC accordingly */
2665 gen_op_movi_cc(s, 0);
2666 break;
2667 case 0x55: /* MVST R1,R2 [RRE] */
2668 tmp32_1 = load_reg32(0);
2669 tmp32_2 = tcg_const_i32(r1);
2670 tmp32_3 = tcg_const_i32(r2);
2671 potential_page_fault(s);
2672 gen_helper_mvst(tmp32_1, tmp32_2, tmp32_3);
2673 tcg_temp_free_i32(tmp32_1);
2674 tcg_temp_free_i32(tmp32_2);
2675 tcg_temp_free_i32(tmp32_3);
2676 gen_op_movi_cc(s, 1);
2677 break;
2678 case 0x5d: /* CLST R1,R2 [RRE] */
2679 tmp32_1 = load_reg32(0);
2680 tmp32_2 = tcg_const_i32(r1);
2681 tmp32_3 = tcg_const_i32(r2);
2682 potential_page_fault(s);
2683 gen_helper_clst(cc_op, tmp32_1, tmp32_2, tmp32_3);
2684 set_cc_static(s);
2685 tcg_temp_free_i32(tmp32_1);
2686 tcg_temp_free_i32(tmp32_2);
2687 tcg_temp_free_i32(tmp32_3);
2688 break;
2689 case 0x5e: /* SRST R1,R2 [RRE] */
2690 tmp32_1 = load_reg32(0);
2691 tmp32_2 = tcg_const_i32(r1);
2692 tmp32_3 = tcg_const_i32(r2);
2693 potential_page_fault(s);
2694 gen_helper_srst(cc_op, tmp32_1, tmp32_2, tmp32_3);
2695 set_cc_static(s);
2696 tcg_temp_free_i32(tmp32_1);
2697 tcg_temp_free_i32(tmp32_2);
2698 tcg_temp_free_i32(tmp32_3);
2699 break;
2701 #ifndef CONFIG_USER_ONLY
2702 case 0x02: /* STIDP D2(B2) [S] */
2703 /* Store CPU ID */
2704 check_privileged(s, ilc);
2705 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2706 tmp = get_address(s, 0, b2, d2);
2707 potential_page_fault(s);
2708 gen_helper_stidp(tmp);
2709 tcg_temp_free_i64(tmp);
2710 break;
2711 case 0x04: /* SCK D2(B2) [S] */
2712 /* Set Clock */
2713 check_privileged(s, ilc);
2714 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2715 tmp = get_address(s, 0, b2, d2);
2716 potential_page_fault(s);
2717 gen_helper_sck(cc_op, tmp);
2718 set_cc_static(s);
2719 tcg_temp_free_i64(tmp);
2720 break;
2721 case 0x05: /* STCK D2(B2) [S] */
2722 /* Store Clock */
2723 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2724 tmp = get_address(s, 0, b2, d2);
2725 potential_page_fault(s);
2726 gen_helper_stck(cc_op, tmp);
2727 set_cc_static(s);
2728 tcg_temp_free_i64(tmp);
2729 break;
2730 case 0x06: /* SCKC D2(B2) [S] */
2731 /* Set Clock Comparator */
2732 check_privileged(s, ilc);
2733 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2734 tmp = get_address(s, 0, b2, d2);
2735 potential_page_fault(s);
2736 gen_helper_sckc(tmp);
2737 tcg_temp_free_i64(tmp);
2738 break;
2739 case 0x07: /* STCKC D2(B2) [S] */
2740 /* Store Clock Comparator */
2741 check_privileged(s, ilc);
2742 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2743 tmp = get_address(s, 0, b2, d2);
2744 potential_page_fault(s);
2745 gen_helper_stckc(tmp);
2746 tcg_temp_free_i64(tmp);
2747 break;
2748 case 0x08: /* SPT D2(B2) [S] */
2749 /* Set CPU Timer */
2750 check_privileged(s, ilc);
2751 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2752 tmp = get_address(s, 0, b2, d2);
2753 potential_page_fault(s);
2754 gen_helper_spt(tmp);
2755 tcg_temp_free_i64(tmp);
2756 break;
2757 case 0x09: /* STPT D2(B2) [S] */
2758 /* Store CPU Timer */
2759 check_privileged(s, ilc);
2760 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2761 tmp = get_address(s, 0, b2, d2);
2762 potential_page_fault(s);
2763 gen_helper_stpt(tmp);
2764 tcg_temp_free_i64(tmp);
2765 break;
2766 case 0x0a: /* SPKA D2(B2) [S] */
2767 /* Set PSW Key from Address */
2768 check_privileged(s, ilc);
2769 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2770 tmp = get_address(s, 0, b2, d2);
2771 tmp2 = tcg_temp_new_i64();
2772 tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
2773 tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
2774 tcg_gen_or_i64(psw_mask, tmp2, tmp);
2775 tcg_temp_free_i64(tmp2);
2776 tcg_temp_free_i64(tmp);
2777 break;
2778 case 0x0d: /* PTLB [S] */
2779 /* Purge TLB */
2780 check_privileged(s, ilc);
2781 gen_helper_ptlb();
2782 break;
2783 case 0x10: /* SPX D2(B2) [S] */
2784 /* Set Prefix Register */
2785 check_privileged(s, ilc);
2786 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2787 tmp = get_address(s, 0, b2, d2);
2788 potential_page_fault(s);
2789 gen_helper_spx(tmp);
2790 tcg_temp_free_i64(tmp);
2791 break;
2792 case 0x11: /* STPX D2(B2) [S] */
2793 /* Store Prefix */
2794 check_privileged(s, ilc);
2795 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2796 tmp = get_address(s, 0, b2, d2);
2797 tmp2 = tcg_temp_new_i64();
2798 tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUState, psa));
2799 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2800 tcg_temp_free_i64(tmp);
2801 tcg_temp_free_i64(tmp2);
2802 break;
2803 case 0x12: /* STAP D2(B2) [S] */
2804 /* Store CPU Address */
2805 check_privileged(s, ilc);
2806 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2807 tmp = get_address(s, 0, b2, d2);
2808 tmp2 = tcg_temp_new_i64();
2809 tmp32_1 = tcg_temp_new_i32();
2810 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, cpu_num));
2811 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
2812 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2813 tcg_temp_free_i64(tmp);
2814 tcg_temp_free_i64(tmp2);
2815 tcg_temp_free_i32(tmp32_1);
2816 break;
2817 case 0x21: /* IPTE R1,R2 [RRE] */
2818 /* Invalidate PTE */
2819 check_privileged(s, ilc);
2820 r1 = (insn >> 4) & 0xf;
2821 r2 = insn & 0xf;
2822 tmp = load_reg(r1);
2823 tmp2 = load_reg(r2);
2824 gen_helper_ipte(tmp, tmp2);
2825 tcg_temp_free_i64(tmp);
2826 tcg_temp_free_i64(tmp2);
2827 break;
2828 case 0x29: /* ISKE R1,R2 [RRE] */
2829 /* Insert Storage Key Extended */
2830 check_privileged(s, ilc);
2831 r1 = (insn >> 4) & 0xf;
2832 r2 = insn & 0xf;
2833 tmp = load_reg(r2);
2834 tmp2 = tcg_temp_new_i64();
2835 gen_helper_iske(tmp2, tmp);
2836 store_reg(r1, tmp2);
2837 tcg_temp_free_i64(tmp);
2838 tcg_temp_free_i64(tmp2);
2839 break;
2840 case 0x2a: /* RRBE R1,R2 [RRE] */
2841 /* Set Storage Key Extended */
2842 check_privileged(s, ilc);
2843 r1 = (insn >> 4) & 0xf;
2844 r2 = insn & 0xf;
2845 tmp32_1 = load_reg32(r1);
2846 tmp = load_reg(r2);
2847 gen_helper_rrbe(cc_op, tmp32_1, tmp);
2848 set_cc_static(s);
2849 tcg_temp_free_i32(tmp32_1);
2850 tcg_temp_free_i64(tmp);
2851 break;
2852 case 0x2b: /* SSKE R1,R2 [RRE] */
2853 /* Set Storage Key Extended */
2854 check_privileged(s, ilc);
2855 r1 = (insn >> 4) & 0xf;
2856 r2 = insn & 0xf;
2857 tmp32_1 = load_reg32(r1);
2858 tmp = load_reg(r2);
2859 gen_helper_sske(tmp32_1, tmp);
2860 tcg_temp_free_i32(tmp32_1);
2861 tcg_temp_free_i64(tmp);
2862 break;
2863 case 0x34: /* STCH ? */
2864 /* Store Subchannel */
2865 check_privileged(s, ilc);
2866 gen_op_movi_cc(s, 3);
2867 break;
2868 case 0x46: /* STURA R1,R2 [RRE] */
2869 /* Store Using Real Address */
2870 check_privileged(s, ilc);
2871 r1 = (insn >> 4) & 0xf;
2872 r2 = insn & 0xf;
2873 tmp32_1 = load_reg32(r1);
2874 tmp = load_reg(r2);
2875 potential_page_fault(s);
2876 gen_helper_stura(tmp, tmp32_1);
2877 tcg_temp_free_i32(tmp32_1);
2878 tcg_temp_free_i64(tmp);
2879 break;
2880 case 0x50: /* CSP R1,R2 [RRE] */
2881 /* Compare And Swap And Purge */
2882 check_privileged(s, ilc);
2883 r1 = (insn >> 4) & 0xf;
2884 r2 = insn & 0xf;
2885 tmp32_1 = tcg_const_i32(r1);
2886 tmp32_2 = tcg_const_i32(r2);
2887 gen_helper_csp(cc_op, tmp32_1, tmp32_2);
2888 set_cc_static(s);
2889 tcg_temp_free_i32(tmp32_1);
2890 tcg_temp_free_i32(tmp32_2);
2891 break;
2892 case 0x5f: /* CHSC ? */
2893 /* Channel Subsystem Call */
2894 check_privileged(s, ilc);
2895 gen_op_movi_cc(s, 3);
2896 break;
2897 case 0x78: /* STCKE D2(B2) [S] */
2898 /* Store Clock Extended */
2899 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2900 tmp = get_address(s, 0, b2, d2);
2901 potential_page_fault(s);
2902 gen_helper_stcke(cc_op, tmp);
2903 set_cc_static(s);
2904 tcg_temp_free_i64(tmp);
2905 break;
2906 case 0x79: /* SACF D2(B2) [S] */
2907 /* Store Clock Extended */
2908 check_privileged(s, ilc);
2909 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2910 tmp = get_address(s, 0, b2, d2);
2911 potential_page_fault(s);
2912 gen_helper_sacf(tmp);
2913 tcg_temp_free_i64(tmp);
2914 /* addressing mode has changed, so end the block */
2915 s->pc += ilc * 2;
2916 update_psw_addr(s);
2917 s->is_jmp = DISAS_EXCP;
2918 break;
2919 case 0x7d: /* STSI D2,(B2) [S] */
2920 check_privileged(s, ilc);
2921 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2922 tmp = get_address(s, 0, b2, d2);
2923 tmp32_1 = load_reg32(0);
2924 tmp32_2 = load_reg32(1);
2925 potential_page_fault(s);
2926 gen_helper_stsi(cc_op, tmp, tmp32_1, tmp32_2);
2927 set_cc_static(s);
2928 tcg_temp_free_i64(tmp);
2929 tcg_temp_free_i32(tmp32_1);
2930 tcg_temp_free_i32(tmp32_2);
2931 break;
2932 case 0x9d: /* LFPC D2(B2) [S] */
2933 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2934 tmp = get_address(s, 0, b2, d2);
2935 tmp2 = tcg_temp_new_i64();
2936 tmp32_1 = tcg_temp_new_i32();
2937 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
2938 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
2939 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
2940 tcg_temp_free_i64(tmp);
2941 tcg_temp_free_i64(tmp2);
2942 tcg_temp_free_i32(tmp32_1);
2943 break;
2944 case 0xb1: /* STFL D2(B2) [S] */
2945 /* Store Facility List (CPU features) at 200 */
2946 check_privileged(s, ilc);
2947 tmp2 = tcg_const_i64(0xc0000000);
2948 tmp = tcg_const_i64(200);
2949 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
2950 tcg_temp_free_i64(tmp2);
2951 tcg_temp_free_i64(tmp);
2952 break;
2953 case 0xb2: /* LPSWE D2(B2) [S] */
2954 /* Load PSW Extended */
2955 check_privileged(s, ilc);
2956 decode_rs(s, insn, &r1, &r3, &b2, &d2);
2957 tmp = get_address(s, 0, b2, d2);
2958 tmp2 = tcg_temp_new_i64();
2959 tmp3 = tcg_temp_new_i64();
2960 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
2961 tcg_gen_addi_i64(tmp, tmp, 8);
2962 tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
2963 gen_helper_load_psw(tmp2, tmp3);
2964 /* we need to keep cc_op intact */
2965 s->is_jmp = DISAS_JUMP;
2966 tcg_temp_free_i64(tmp);
2967 break;
2968 case 0x20: /* SERVC R1,R2 [RRE] */
2969 /* SCLP Service call (PV hypercall) */
2970 check_privileged(s, ilc);
2971 potential_page_fault(s);
2972 tmp32_1 = load_reg32(r2);
2973 tmp = load_reg(r1);
2974 gen_helper_servc(cc_op, tmp32_1, tmp);
2975 set_cc_static(s);
2976 tcg_temp_free_i32(tmp32_1);
2977 tcg_temp_free_i64(tmp);
2978 break;
2979 #endif
2980 default:
2981 LOG_DISAS("illegal b2 operation 0x%x\n", op);
2982 gen_illegal_opcode(s, ilc);
2983 break;
2987 static void disas_b3(DisasContext *s, int op, int m3, int r1, int r2)
2989 TCGv_i64 tmp;
2990 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
2991 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
2992 #define FP_HELPER(i) \
2993 tmp32_1 = tcg_const_i32(r1); \
2994 tmp32_2 = tcg_const_i32(r2); \
2995 gen_helper_ ## i (tmp32_1, tmp32_2); \
2996 tcg_temp_free_i32(tmp32_1); \
2997 tcg_temp_free_i32(tmp32_2);
2999 #define FP_HELPER_CC(i) \
3000 tmp32_1 = tcg_const_i32(r1); \
3001 tmp32_2 = tcg_const_i32(r2); \
3002 gen_helper_ ## i (cc_op, tmp32_1, tmp32_2); \
3003 set_cc_static(s); \
3004 tcg_temp_free_i32(tmp32_1); \
3005 tcg_temp_free_i32(tmp32_2);
3007 switch (op) {
3008 case 0x0: /* LPEBR R1,R2 [RRE] */
3009 FP_HELPER_CC(lpebr);
3010 break;
3011 case 0x2: /* LTEBR R1,R2 [RRE] */
3012 FP_HELPER_CC(ltebr);
3013 break;
3014 case 0x3: /* LCEBR R1,R2 [RRE] */
3015 FP_HELPER_CC(lcebr);
3016 break;
3017 case 0x4: /* LDEBR R1,R2 [RRE] */
3018 FP_HELPER(ldebr);
3019 break;
3020 case 0x5: /* LXDBR R1,R2 [RRE] */
3021 FP_HELPER(lxdbr);
3022 break;
3023 case 0x9: /* CEBR R1,R2 [RRE] */
3024 FP_HELPER_CC(cebr);
3025 break;
3026 case 0xa: /* AEBR R1,R2 [RRE] */
3027 FP_HELPER_CC(aebr);
3028 break;
3029 case 0xb: /* SEBR R1,R2 [RRE] */
3030 FP_HELPER_CC(sebr);
3031 break;
3032 case 0xd: /* DEBR R1,R2 [RRE] */
3033 FP_HELPER(debr);
3034 break;
3035 case 0x10: /* LPDBR R1,R2 [RRE] */
3036 FP_HELPER_CC(lpdbr);
3037 break;
3038 case 0x12: /* LTDBR R1,R2 [RRE] */
3039 FP_HELPER_CC(ltdbr);
3040 break;
3041 case 0x13: /* LCDBR R1,R2 [RRE] */
3042 FP_HELPER_CC(lcdbr);
3043 break;
3044 case 0x15: /* SQBDR R1,R2 [RRE] */
3045 FP_HELPER(sqdbr);
3046 break;
3047 case 0x17: /* MEEBR R1,R2 [RRE] */
3048 FP_HELPER(meebr);
3049 break;
3050 case 0x19: /* CDBR R1,R2 [RRE] */
3051 FP_HELPER_CC(cdbr);
3052 break;
3053 case 0x1a: /* ADBR R1,R2 [RRE] */
3054 FP_HELPER_CC(adbr);
3055 break;
3056 case 0x1b: /* SDBR R1,R2 [RRE] */
3057 FP_HELPER_CC(sdbr);
3058 break;
3059 case 0x1c: /* MDBR R1,R2 [RRE] */
3060 FP_HELPER(mdbr);
3061 break;
3062 case 0x1d: /* DDBR R1,R2 [RRE] */
3063 FP_HELPER(ddbr);
3064 break;
3065 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3066 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3067 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3068 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3069 tmp32_1 = tcg_const_i32(m3);
3070 tmp32_2 = tcg_const_i32(r2);
3071 tmp32_3 = tcg_const_i32(r1);
3072 switch (op) {
3073 case 0xe:
3074 gen_helper_maebr(tmp32_1, tmp32_3, tmp32_2);
3075 break;
3076 case 0x1e:
3077 gen_helper_madbr(tmp32_1, tmp32_3, tmp32_2);
3078 break;
3079 case 0x1f:
3080 gen_helper_msdbr(tmp32_1, tmp32_3, tmp32_2);
3081 break;
3082 default:
3083 tcg_abort();
3085 tcg_temp_free_i32(tmp32_1);
3086 tcg_temp_free_i32(tmp32_2);
3087 tcg_temp_free_i32(tmp32_3);
3088 break;
3089 case 0x40: /* LPXBR R1,R2 [RRE] */
3090 FP_HELPER_CC(lpxbr);
3091 break;
3092 case 0x42: /* LTXBR R1,R2 [RRE] */
3093 FP_HELPER_CC(ltxbr);
3094 break;
3095 case 0x43: /* LCXBR R1,R2 [RRE] */
3096 FP_HELPER_CC(lcxbr);
3097 break;
3098 case 0x44: /* LEDBR R1,R2 [RRE] */
3099 FP_HELPER(ledbr);
3100 break;
3101 case 0x45: /* LDXBR R1,R2 [RRE] */
3102 FP_HELPER(ldxbr);
3103 break;
3104 case 0x46: /* LEXBR R1,R2 [RRE] */
3105 FP_HELPER(lexbr);
3106 break;
3107 case 0x49: /* CXBR R1,R2 [RRE] */
3108 FP_HELPER_CC(cxbr);
3109 break;
3110 case 0x4a: /* AXBR R1,R2 [RRE] */
3111 FP_HELPER_CC(axbr);
3112 break;
3113 case 0x4b: /* SXBR R1,R2 [RRE] */
3114 FP_HELPER_CC(sxbr);
3115 break;
3116 case 0x4c: /* MXBR R1,R2 [RRE] */
3117 FP_HELPER(mxbr);
3118 break;
3119 case 0x4d: /* DXBR R1,R2 [RRE] */
3120 FP_HELPER(dxbr);
3121 break;
3122 case 0x65: /* LXR R1,R2 [RRE] */
3123 tmp = load_freg(r2);
3124 store_freg(r1, tmp);
3125 tcg_temp_free_i64(tmp);
3126 tmp = load_freg(r2 + 2);
3127 store_freg(r1 + 2, tmp);
3128 tcg_temp_free_i64(tmp);
3129 break;
3130 case 0x74: /* LZER R1 [RRE] */
3131 tmp32_1 = tcg_const_i32(r1);
3132 gen_helper_lzer(tmp32_1);
3133 tcg_temp_free_i32(tmp32_1);
3134 break;
3135 case 0x75: /* LZDR R1 [RRE] */
3136 tmp32_1 = tcg_const_i32(r1);
3137 gen_helper_lzdr(tmp32_1);
3138 tcg_temp_free_i32(tmp32_1);
3139 break;
3140 case 0x76: /* LZXR R1 [RRE] */
3141 tmp32_1 = tcg_const_i32(r1);
3142 gen_helper_lzxr(tmp32_1);
3143 tcg_temp_free_i32(tmp32_1);
3144 break;
3145 case 0x84: /* SFPC R1 [RRE] */
3146 tmp32_1 = load_reg32(r1);
3147 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
3148 tcg_temp_free_i32(tmp32_1);
3149 break;
3150 case 0x8c: /* EFPC R1 [RRE] */
3151 tmp32_1 = tcg_temp_new_i32();
3152 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
3153 store_reg32(r1, tmp32_1);
3154 tcg_temp_free_i32(tmp32_1);
3155 break;
3156 case 0x94: /* CEFBR R1,R2 [RRE] */
3157 case 0x95: /* CDFBR R1,R2 [RRE] */
3158 case 0x96: /* CXFBR R1,R2 [RRE] */
3159 tmp32_1 = tcg_const_i32(r1);
3160 tmp32_2 = load_reg32(r2);
3161 switch (op) {
3162 case 0x94:
3163 gen_helper_cefbr(tmp32_1, tmp32_2);
3164 break;
3165 case 0x95:
3166 gen_helper_cdfbr(tmp32_1, tmp32_2);
3167 break;
3168 case 0x96:
3169 gen_helper_cxfbr(tmp32_1, tmp32_2);
3170 break;
3171 default:
3172 tcg_abort();
3174 tcg_temp_free_i32(tmp32_1);
3175 tcg_temp_free_i32(tmp32_2);
3176 break;
3177 case 0x98: /* CFEBR R1,R2 [RRE] */
3178 case 0x99: /* CFDBR R1,R2 [RRE] */
3179 case 0x9a: /* CFXBR R1,R2 [RRE] */
3180 tmp32_1 = tcg_const_i32(r1);
3181 tmp32_2 = tcg_const_i32(r2);
3182 tmp32_3 = tcg_const_i32(m3);
3183 switch (op) {
3184 case 0x98:
3185 gen_helper_cfebr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3186 break;
3187 case 0x99:
3188 gen_helper_cfdbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3189 break;
3190 case 0x9a:
3191 gen_helper_cfxbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3192 break;
3193 default:
3194 tcg_abort();
3196 set_cc_static(s);
3197 tcg_temp_free_i32(tmp32_1);
3198 tcg_temp_free_i32(tmp32_2);
3199 tcg_temp_free_i32(tmp32_3);
3200 break;
3201 case 0xa4: /* CEGBR R1,R2 [RRE] */
3202 case 0xa5: /* CDGBR R1,R2 [RRE] */
3203 tmp32_1 = tcg_const_i32(r1);
3204 tmp = load_reg(r2);
3205 switch (op) {
3206 case 0xa4:
3207 gen_helper_cegbr(tmp32_1, tmp);
3208 break;
3209 case 0xa5:
3210 gen_helper_cdgbr(tmp32_1, tmp);
3211 break;
3212 default:
3213 tcg_abort();
3215 tcg_temp_free_i32(tmp32_1);
3216 tcg_temp_free_i64(tmp);
3217 break;
3218 case 0xa6: /* CXGBR R1,R2 [RRE] */
3219 tmp32_1 = tcg_const_i32(r1);
3220 tmp = load_reg(r2);
3221 gen_helper_cxgbr(tmp32_1, tmp);
3222 tcg_temp_free_i32(tmp32_1);
3223 tcg_temp_free_i64(tmp);
3224 break;
3225 case 0xa8: /* CGEBR R1,R2 [RRE] */
3226 tmp32_1 = tcg_const_i32(r1);
3227 tmp32_2 = tcg_const_i32(r2);
3228 tmp32_3 = tcg_const_i32(m3);
3229 gen_helper_cgebr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3230 set_cc_static(s);
3231 tcg_temp_free_i32(tmp32_1);
3232 tcg_temp_free_i32(tmp32_2);
3233 tcg_temp_free_i32(tmp32_3);
3234 break;
3235 case 0xa9: /* CGDBR R1,R2 [RRE] */
3236 tmp32_1 = tcg_const_i32(r1);
3237 tmp32_2 = tcg_const_i32(r2);
3238 tmp32_3 = tcg_const_i32(m3);
3239 gen_helper_cgdbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3240 set_cc_static(s);
3241 tcg_temp_free_i32(tmp32_1);
3242 tcg_temp_free_i32(tmp32_2);
3243 tcg_temp_free_i32(tmp32_3);
3244 break;
3245 case 0xaa: /* CGXBR R1,R2 [RRE] */
3246 tmp32_1 = tcg_const_i32(r1);
3247 tmp32_2 = tcg_const_i32(r2);
3248 tmp32_3 = tcg_const_i32(m3);
3249 gen_helper_cgxbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
3250 set_cc_static(s);
3251 tcg_temp_free_i32(tmp32_1);
3252 tcg_temp_free_i32(tmp32_2);
3253 tcg_temp_free_i32(tmp32_3);
3254 break;
3255 default:
3256 LOG_DISAS("illegal b3 operation 0x%x\n", op);
3257 gen_illegal_opcode(s, 2);
3258 break;
3261 #undef FP_HELPER_CC
3262 #undef FP_HELPER
3265 static void disas_b9(DisasContext *s, int op, int r1, int r2)
3267 TCGv_i64 tmp, tmp2, tmp3;
3268 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3270 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
3271 switch (op) {
3272 case 0x0: /* LPGR R1,R2 [RRE] */
3273 case 0x1: /* LNGR R1,R2 [RRE] */
3274 case 0x2: /* LTGR R1,R2 [RRE] */
3275 case 0x3: /* LCGR R1,R2 [RRE] */
3276 case 0x10: /* LPGFR R1,R2 [RRE] */
3277 case 0x11: /* LNFGR R1,R2 [RRE] */
3278 case 0x12: /* LTGFR R1,R2 [RRE] */
3279 case 0x13: /* LCGFR R1,R2 [RRE] */
3280 if (op & 0x10) {
3281 tmp = load_reg32_i64(r2);
3282 } else {
3283 tmp = load_reg(r2);
3285 switch (op & 0xf) {
3286 case 0x0: /* LP?GR */
3287 set_cc_abs64(s, tmp);
3288 gen_helper_abs_i64(tmp, tmp);
3289 store_reg(r1, tmp);
3290 break;
3291 case 0x1: /* LN?GR */
3292 set_cc_nabs64(s, tmp);
3293 gen_helper_nabs_i64(tmp, tmp);
3294 store_reg(r1, tmp);
3295 break;
3296 case 0x2: /* LT?GR */
3297 if (r1 != r2) {
3298 store_reg(r1, tmp);
3300 set_cc_s64(s, tmp);
3301 break;
3302 case 0x3: /* LC?GR */
3303 tcg_gen_neg_i64(regs[r1], tmp);
3304 set_cc_comp64(s, regs[r1]);
3305 break;
3307 tcg_temp_free_i64(tmp);
3308 break;
3309 case 0x4: /* LGR R1,R2 [RRE] */
3310 store_reg(r1, regs[r2]);
3311 break;
3312 case 0x6: /* LGBR R1,R2 [RRE] */
3313 tmp2 = load_reg(r2);
3314 tcg_gen_ext8s_i64(tmp2, tmp2);
3315 store_reg(r1, tmp2);
3316 tcg_temp_free_i64(tmp2);
3317 break;
3318 case 0x8: /* AGR R1,R2 [RRE] */
3319 case 0xa: /* ALGR R1,R2 [RRE] */
3320 tmp = load_reg(r1);
3321 tmp2 = load_reg(r2);
3322 tmp3 = tcg_temp_new_i64();
3323 tcg_gen_add_i64(tmp3, tmp, tmp2);
3324 store_reg(r1, tmp3);
3325 switch (op) {
3326 case 0x8:
3327 set_cc_add64(s, tmp, tmp2, tmp3);
3328 break;
3329 case 0xa:
3330 set_cc_addu64(s, tmp, tmp2, tmp3);
3331 break;
3332 default:
3333 tcg_abort();
3335 tcg_temp_free_i64(tmp);
3336 tcg_temp_free_i64(tmp2);
3337 tcg_temp_free_i64(tmp3);
3338 break;
3339 case 0x9: /* SGR R1,R2 [RRE] */
3340 case 0xb: /* SLGR R1,R2 [RRE] */
3341 case 0x1b: /* SLGFR R1,R2 [RRE] */
3342 case 0x19: /* SGFR R1,R2 [RRE] */
3343 tmp = load_reg(r1);
3344 switch (op) {
3345 case 0x1b:
3346 tmp32_1 = load_reg32(r2);
3347 tmp2 = tcg_temp_new_i64();
3348 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3349 tcg_temp_free_i32(tmp32_1);
3350 break;
3351 case 0x19:
3352 tmp32_1 = load_reg32(r2);
3353 tmp2 = tcg_temp_new_i64();
3354 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3355 tcg_temp_free_i32(tmp32_1);
3356 break;
3357 default:
3358 tmp2 = load_reg(r2);
3359 break;
3361 tmp3 = tcg_temp_new_i64();
3362 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3363 store_reg(r1, tmp3);
3364 switch (op) {
3365 case 0x9:
3366 case 0x19:
3367 set_cc_sub64(s, tmp, tmp2, tmp3);
3368 break;
3369 case 0xb:
3370 case 0x1b:
3371 set_cc_subu64(s, tmp, tmp2, tmp3);
3372 break;
3373 default:
3374 tcg_abort();
3376 tcg_temp_free_i64(tmp);
3377 tcg_temp_free_i64(tmp2);
3378 tcg_temp_free_i64(tmp3);
3379 break;
3380 case 0xc: /* MSGR R1,R2 [RRE] */
3381 case 0x1c: /* MSGFR R1,R2 [RRE] */
3382 tmp = load_reg(r1);
3383 tmp2 = load_reg(r2);
3384 if (op == 0x1c) {
3385 tcg_gen_ext32s_i64(tmp2, tmp2);
3387 tcg_gen_mul_i64(tmp, tmp, tmp2);
3388 store_reg(r1, tmp);
3389 tcg_temp_free_i64(tmp);
3390 tcg_temp_free_i64(tmp2);
3391 break;
3392 case 0xd: /* DSGR R1,R2 [RRE] */
3393 case 0x1d: /* DSGFR R1,R2 [RRE] */
3394 tmp = load_reg(r1 + 1);
3395 if (op == 0xd) {
3396 tmp2 = load_reg(r2);
3397 } else {
3398 tmp32_1 = load_reg32(r2);
3399 tmp2 = tcg_temp_new_i64();
3400 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3401 tcg_temp_free_i32(tmp32_1);
3403 tmp3 = tcg_temp_new_i64();
3404 tcg_gen_div_i64(tmp3, tmp, tmp2);
3405 store_reg(r1 + 1, tmp3);
3406 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3407 store_reg(r1, tmp3);
3408 tcg_temp_free_i64(tmp);
3409 tcg_temp_free_i64(tmp2);
3410 tcg_temp_free_i64(tmp3);
3411 break;
3412 case 0x14: /* LGFR R1,R2 [RRE] */
3413 tmp32_1 = load_reg32(r2);
3414 tmp = tcg_temp_new_i64();
3415 tcg_gen_ext_i32_i64(tmp, tmp32_1);
3416 store_reg(r1, tmp);
3417 tcg_temp_free_i32(tmp32_1);
3418 tcg_temp_free_i64(tmp);
3419 break;
3420 case 0x16: /* LLGFR R1,R2 [RRE] */
3421 tmp32_1 = load_reg32(r2);
3422 tmp = tcg_temp_new_i64();
3423 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3424 store_reg(r1, tmp);
3425 tcg_temp_free_i32(tmp32_1);
3426 tcg_temp_free_i64(tmp);
3427 break;
3428 case 0x17: /* LLGTR R1,R2 [RRE] */
3429 tmp32_1 = load_reg32(r2);
3430 tmp = tcg_temp_new_i64();
3431 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
3432 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3433 store_reg(r1, tmp);
3434 tcg_temp_free_i32(tmp32_1);
3435 tcg_temp_free_i64(tmp);
3436 break;
3437 case 0x18: /* AGFR R1,R2 [RRE] */
3438 case 0x1a: /* ALGFR R1,R2 [RRE] */
3439 tmp32_1 = load_reg32(r2);
3440 tmp2 = tcg_temp_new_i64();
3441 if (op == 0x18) {
3442 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3443 } else {
3444 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3446 tcg_temp_free_i32(tmp32_1);
3447 tmp = load_reg(r1);
3448 tmp3 = tcg_temp_new_i64();
3449 tcg_gen_add_i64(tmp3, tmp, tmp2);
3450 store_reg(r1, tmp3);
3451 if (op == 0x18) {
3452 set_cc_add64(s, tmp, tmp2, tmp3);
3453 } else {
3454 set_cc_addu64(s, tmp, tmp2, tmp3);
3456 tcg_temp_free_i64(tmp);
3457 tcg_temp_free_i64(tmp2);
3458 tcg_temp_free_i64(tmp3);
3459 break;
3460 case 0x1f: /* LRVR R1,R2 [RRE] */
3461 tmp32_1 = load_reg32(r2);
3462 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
3463 store_reg32(r1, tmp32_1);
3464 tcg_temp_free_i32(tmp32_1);
3465 break;
3466 case 0x20: /* CGR R1,R2 [RRE] */
3467 case 0x30: /* CGFR R1,R2 [RRE] */
3468 tmp2 = load_reg(r2);
3469 if (op == 0x30) {
3470 tcg_gen_ext32s_i64(tmp2, tmp2);
3472 tmp = load_reg(r1);
3473 cmp_s64(s, tmp, tmp2);
3474 tcg_temp_free_i64(tmp);
3475 tcg_temp_free_i64(tmp2);
3476 break;
3477 case 0x21: /* CLGR R1,R2 [RRE] */
3478 case 0x31: /* CLGFR R1,R2 [RRE] */
3479 tmp2 = load_reg(r2);
3480 if (op == 0x31) {
3481 tcg_gen_ext32u_i64(tmp2, tmp2);
3483 tmp = load_reg(r1);
3484 cmp_u64(s, tmp, tmp2);
3485 tcg_temp_free_i64(tmp);
3486 tcg_temp_free_i64(tmp2);
3487 break;
3488 case 0x26: /* LBR R1,R2 [RRE] */
3489 tmp32_1 = load_reg32(r2);
3490 tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
3491 store_reg32(r1, tmp32_1);
3492 tcg_temp_free_i32(tmp32_1);
3493 break;
3494 case 0x27: /* LHR R1,R2 [RRE] */
3495 tmp32_1 = load_reg32(r2);
3496 tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
3497 store_reg32(r1, tmp32_1);
3498 tcg_temp_free_i32(tmp32_1);
3499 break;
3500 case 0x80: /* NGR R1,R2 [RRE] */
3501 case 0x81: /* OGR R1,R2 [RRE] */
3502 case 0x82: /* XGR R1,R2 [RRE] */
3503 tmp = load_reg(r1);
3504 tmp2 = load_reg(r2);
3505 switch (op) {
3506 case 0x80:
3507 tcg_gen_and_i64(tmp, tmp, tmp2);
3508 break;
3509 case 0x81:
3510 tcg_gen_or_i64(tmp, tmp, tmp2);
3511 break;
3512 case 0x82:
3513 tcg_gen_xor_i64(tmp, tmp, tmp2);
3514 break;
3515 default:
3516 tcg_abort();
3518 store_reg(r1, tmp);
3519 set_cc_nz_u64(s, tmp);
3520 tcg_temp_free_i64(tmp);
3521 tcg_temp_free_i64(tmp2);
3522 break;
3523 case 0x83: /* FLOGR R1,R2 [RRE] */
3524 tmp = load_reg(r2);
3525 tmp32_1 = tcg_const_i32(r1);
3526 gen_helper_flogr(cc_op, tmp32_1, tmp);
3527 set_cc_static(s);
3528 tcg_temp_free_i64(tmp);
3529 tcg_temp_free_i32(tmp32_1);
3530 break;
3531 case 0x84: /* LLGCR R1,R2 [RRE] */
3532 tmp = load_reg(r2);
3533 tcg_gen_andi_i64(tmp, tmp, 0xff);
3534 store_reg(r1, tmp);
3535 tcg_temp_free_i64(tmp);
3536 break;
3537 case 0x85: /* LLGHR R1,R2 [RRE] */
3538 tmp = load_reg(r2);
3539 tcg_gen_andi_i64(tmp, tmp, 0xffff);
3540 store_reg(r1, tmp);
3541 tcg_temp_free_i64(tmp);
3542 break;
3543 case 0x87: /* DLGR R1,R2 [RRE] */
3544 tmp32_1 = tcg_const_i32(r1);
3545 tmp = load_reg(r2);
3546 gen_helper_dlg(tmp32_1, tmp);
3547 tcg_temp_free_i64(tmp);
3548 tcg_temp_free_i32(tmp32_1);
3549 break;
3550 case 0x88: /* ALCGR R1,R2 [RRE] */
3551 tmp = load_reg(r1);
3552 tmp2 = load_reg(r2);
3553 tmp3 = tcg_temp_new_i64();
3554 gen_op_calc_cc(s);
3555 tcg_gen_extu_i32_i64(tmp3, cc_op);
3556 tcg_gen_shri_i64(tmp3, tmp3, 1);
3557 tcg_gen_andi_i64(tmp3, tmp3, 1);
3558 tcg_gen_add_i64(tmp3, tmp2, tmp3);
3559 tcg_gen_add_i64(tmp3, tmp, tmp3);
3560 store_reg(r1, tmp3);
3561 set_cc_addu64(s, tmp, tmp2, tmp3);
3562 tcg_temp_free_i64(tmp);
3563 tcg_temp_free_i64(tmp2);
3564 tcg_temp_free_i64(tmp3);
3565 break;
3566 case 0x89: /* SLBGR R1,R2 [RRE] */
3567 tmp = load_reg(r1);
3568 tmp2 = load_reg(r2);
3569 tmp32_1 = tcg_const_i32(r1);
3570 gen_op_calc_cc(s);
3571 gen_helper_slbg(cc_op, cc_op, tmp32_1, tmp, tmp2);
3572 set_cc_static(s);
3573 tcg_temp_free_i64(tmp);
3574 tcg_temp_free_i64(tmp2);
3575 tcg_temp_free_i32(tmp32_1);
3576 break;
3577 case 0x94: /* LLCR R1,R2 [RRE] */
3578 tmp32_1 = load_reg32(r2);
3579 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
3580 store_reg32(r1, tmp32_1);
3581 tcg_temp_free_i32(tmp32_1);
3582 break;
3583 case 0x95: /* LLHR R1,R2 [RRE] */
3584 tmp32_1 = load_reg32(r2);
3585 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
3586 store_reg32(r1, tmp32_1);
3587 tcg_temp_free_i32(tmp32_1);
3588 break;
3589 case 0x96: /* MLR R1,R2 [RRE] */
3590 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3591 tmp2 = load_reg(r2);
3592 tmp3 = load_reg((r1 + 1) & 15);
3593 tcg_gen_ext32u_i64(tmp2, tmp2);
3594 tcg_gen_ext32u_i64(tmp3, tmp3);
3595 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3596 store_reg32_i64((r1 + 1) & 15, tmp2);
3597 tcg_gen_shri_i64(tmp2, tmp2, 32);
3598 store_reg32_i64(r1, tmp2);
3599 tcg_temp_free_i64(tmp2);
3600 tcg_temp_free_i64(tmp3);
3601 break;
3602 case 0x97: /* DLR R1,R2 [RRE] */
3603 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3604 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3605 tmp = load_reg(r1);
3606 tmp2 = load_reg(r2);
3607 tmp3 = load_reg((r1 + 1) & 15);
3608 tcg_gen_ext32u_i64(tmp2, tmp2);
3609 tcg_gen_ext32u_i64(tmp3, tmp3);
3610 tcg_gen_shli_i64(tmp, tmp, 32);
3611 tcg_gen_or_i64(tmp, tmp, tmp3);
3613 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3614 tcg_gen_div_i64(tmp, tmp, tmp2);
3615 store_reg32_i64((r1 + 1) & 15, tmp);
3616 store_reg32_i64(r1, tmp3);
3617 tcg_temp_free_i64(tmp);
3618 tcg_temp_free_i64(tmp2);
3619 tcg_temp_free_i64(tmp3);
3620 break;
3621 case 0x98: /* ALCR R1,R2 [RRE] */
3622 tmp32_1 = load_reg32(r1);
3623 tmp32_2 = load_reg32(r2);
3624 tmp32_3 = tcg_temp_new_i32();
3625 /* XXX possible optimization point */
3626 gen_op_calc_cc(s);
3627 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
3628 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3629 store_reg32(r1, tmp32_3);
3630 tcg_temp_free_i32(tmp32_1);
3631 tcg_temp_free_i32(tmp32_2);
3632 tcg_temp_free_i32(tmp32_3);
3633 break;
3634 case 0x99: /* SLBR R1,R2 [RRE] */
3635 tmp32_1 = load_reg32(r2);
3636 tmp32_2 = tcg_const_i32(r1);
3637 gen_op_calc_cc(s);
3638 gen_helper_slb(cc_op, cc_op, tmp32_2, tmp32_1);
3639 set_cc_static(s);
3640 tcg_temp_free_i32(tmp32_1);
3641 tcg_temp_free_i32(tmp32_2);
3642 break;
3643 default:
3644 LOG_DISAS("illegal b9 operation 0x%x\n", op);
3645 gen_illegal_opcode(s, 2);
3646 break;
/*
 * Disassemble one C0xx (RIL-format) opcode: PC-relative address/branch
 * instructions and 32-bit immediate logical/load operations.
 *
 * @s:  per-TB disassembly state (s->pc is the address of this insn)
 * @op: low opcode nibble selecting the operation
 * @r1: first register field (doubles as the mask M1 for BRCL)
 * @i2: signed 32-bit immediate field
 */
static void disas_c0(DisasContext *s, int op, int r1, int i2)
{
    TCGv_i64 tmp;
    TCGv_i32 tmp32_1, tmp32_2;
    /* RIL relative immediates count halfwords, hence target = pc + 2*i2 */
    uint64_t target = s->pc + i2 * 2LL;
    int l1;

    LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);

    switch (op) {
    case 0: /* larl r1, i2 */
        /* Load Address Relative Long: r1 = pc-relative target */
        tmp = tcg_const_i64(target);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x1: /* LGFI R1,I2 [RIL] */
        /* Load sign-extended 32-bit immediate into the full 64-bit r1 */
        tmp = tcg_const_i64((int64_t)i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* BRCL M1,I2 [RIL] */
        /* Branch Relative on Condition Long: taken iff
           m1 & (1 << (3 - cc)) */
        tmp32_1 = tcg_const_i32(3);
        tmp32_2 = tcg_const_i32(1);
        gen_op_calc_cc(s);
        tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
        tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
        tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
        l1 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
        gen_goto_tb(s, 0, target);
        gen_set_label(l1);
        /* condition not fulfilled: continue after this 6-byte insn */
        gen_goto_tb(s, 1, s->pc + 6);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x5: /* brasl r1, i2 */
        /* Branch Relative And Save Long: save link info, then branch */
        tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        gen_goto_tb(s, 0, target);
        s->is_jmp = DISAS_TB_JUMP;
        break;
    case 0x7: /* XILF R1,I2 [RIL] */
    case 0xb: /* NILF R1,I2 [RIL] */
    case 0xd: /* OILF R1,I2 [RIL] */
        /* Logical immediate on the low 32 bits of r1; CC from result */
        tmp32_1 = load_reg32(r1);
        switch (op) {
        case 0x7:
            tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        case 0xb:
            tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        case 0xd:
            tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_1);
        set_cc_nz_u32(s, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x9: /* IILF R1,I2 [RIL] */
        /* Insert Immediate into the low 32 bits of r1 (high half kept) */
        tmp32_1 = tcg_const_i32((uint32_t)i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xa: /* NIHF R1,I2 [RIL] */
        /* AND Immediate into the high 32 bits; CC from the high half */
        tmp = load_reg(r1);
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
                                   | 0xffffffffULL);
        store_reg(r1, tmp);
        tcg_gen_shri_i64(tmp, tmp, 32);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        set_cc_nz_u32(s, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xe: /* LLIHF R1,I2 [RIL] */
        /* Load Logical Immediate High: immediate << 32, low half zeroed */
        tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xf: /* LLILF R1,I2 [RIL] */
        /* Load Logical Immediate Low: zero-extended into 64 bits */
        tmp = tcg_const_i64((uint32_t)i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal c0 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }
}
/*
 * Disassemble one C2xx (RIL-format) opcode: 32-bit-immediate arithmetic
 * and comparison instructions.
 *
 * @s:  per-TB disassembly state
 * @op: low opcode nibble selecting the operation
 * @r1: register field
 * @i2: signed 32-bit immediate field
 */
static void disas_c2(DisasContext *s, int op, int r1, int i2)
{
    TCGv_i64 tmp, tmp2, tmp3;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    switch (op) {
    case 0x4: /* SLGFI R1,I2 [RIL] */
    case 0xa: /* ALGFI R1,I2 [RIL] */
        /* 64-bit subtract/add of the zero-extended immediate; CC from
           the unsigned result */
        tmp = load_reg(r1);
        tmp2 = tcg_const_i64((uint64_t)(uint32_t)i2);
        tmp3 = tcg_temp_new_i64();
        switch (op) {
        case 0x4:
            tcg_gen_sub_i64(tmp3, tmp, tmp2);
            set_cc_subu64(s, tmp, tmp2, tmp3);
            break;
        case 0xa:
            tcg_gen_add_i64(tmp3, tmp, tmp2);
            set_cc_addu64(s, tmp, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x5: /* SLFI R1,I2 [RIL] */
    case 0xb: /* ALFI R1,I2 [RIL] */
        /* 32-bit unsigned subtract/add immediate on the low half of r1 */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_const_i32(i2);
        tmp32_3 = tcg_temp_new_i32();
        switch (op) {
        case 0x5:
            tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
            set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0xb:
            tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
            set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xc: /* CGFI R1,I2 [RIL] */
        /* 64-bit signed compare against the sign-extended immediate */
        tmp = load_reg(r1);
        cmp_s64c(s, tmp, (int64_t)i2);
        tcg_temp_free_i64(tmp);
        break;
    case 0xe: /* CLGFI R1,I2 [RIL] */
        /* 64-bit unsigned compare against the zero-extended immediate */
        tmp = load_reg(r1);
        cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
        tcg_temp_free_i64(tmp);
        break;
    case 0xd: /* CFI R1,I2 [RIL] */
        /* 32-bit signed compare immediate */
        tmp32_1 = load_reg32(r1);
        cmp_s32c(s, tmp32_1, i2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xf: /* CLFI R1,I2 [RIL] */
        /* 32-bit unsigned compare immediate */
        tmp32_1 = load_reg32(r1);
        cmp_u32c(s, tmp32_1, i2);
        tcg_temp_free_i32(tmp32_1);
        break;
    default:
        LOG_DISAS("illegal c2 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }
}
3828 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3830 switch (opc & 0xf) {
3831 case 0x4:
3832 tcg_gen_and_i32(tmp, tmp, tmp2);
3833 break;
3834 case 0x6:
3835 tcg_gen_or_i32(tmp, tmp, tmp2);
3836 break;
3837 case 0x7:
3838 tcg_gen_xor_i32(tmp, tmp, tmp2);
3839 break;
3840 default:
3841 tcg_abort();
3845 static void disas_s390_insn(DisasContext *s)
3847 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3848 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3849 unsigned char opc;
3850 uint64_t insn;
3851 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3852 TCGv_i32 vl;
3853 int ilc;
3854 int l1;
3856 opc = ldub_code(s->pc);
3857 LOG_DISAS("opc 0x%x\n", opc);
3859 ilc = get_ilc(opc);
3861 switch (opc) {
3862 #ifndef CONFIG_USER_ONLY
3863 case 0x01: /* SAM */
3864 insn = ld_code2(s->pc);
3865 /* set addressing mode, but we only do 64bit anyways */
3866 break;
3867 #endif
3868 case 0x6: /* BCTR R1,R2 [RR] */
3869 insn = ld_code2(s->pc);
3870 decode_rr(s, insn, &r1, &r2);
3871 tmp32_1 = load_reg32(r1);
3872 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3873 store_reg32(r1, tmp32_1);
3875 if (r2) {
3876 gen_update_cc_op(s);
3877 l1 = gen_new_label();
3878 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3880 /* not taking the branch, jump to after the instruction */
3881 gen_goto_tb(s, 0, s->pc + 2);
3882 gen_set_label(l1);
3884 /* take the branch, move R2 into psw.addr */
3885 tmp32_1 = load_reg32(r2);
3886 tmp = tcg_temp_new_i64();
3887 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3888 tcg_gen_mov_i64(psw_addr, tmp);
3889 s->is_jmp = DISAS_JUMP;
3890 tcg_temp_free_i32(tmp32_1);
3891 tcg_temp_free_i64(tmp);
3893 break;
3894 case 0x7: /* BCR M1,R2 [RR] */
3895 insn = ld_code2(s->pc);
3896 decode_rr(s, insn, &r1, &r2);
3897 if (r2) {
3898 tmp = load_reg(r2);
3899 gen_bcr(s, r1, tmp, s->pc);
3900 tcg_temp_free_i64(tmp);
3901 s->is_jmp = DISAS_TB_JUMP;
3902 } else {
3903 /* XXX: "serialization and checkpoint-synchronization function"? */
3905 break;
3906 case 0xa: /* SVC I [RR] */
3907 insn = ld_code2(s->pc);
3908 debug_insn(insn);
3909 i = insn & 0xff;
3910 update_psw_addr(s);
3911 gen_op_calc_cc(s);
3912 tmp32_1 = tcg_const_i32(i);
3913 tmp32_2 = tcg_const_i32(ilc * 2);
3914 tmp32_3 = tcg_const_i32(EXCP_SVC);
3915 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, int_svc_code));
3916 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUState, int_svc_ilc));
3917 gen_helper_exception(tmp32_3);
3918 s->is_jmp = DISAS_EXCP;
3919 tcg_temp_free_i32(tmp32_1);
3920 tcg_temp_free_i32(tmp32_2);
3921 tcg_temp_free_i32(tmp32_3);
3922 break;
3923 case 0xd: /* BASR R1,R2 [RR] */
3924 insn = ld_code2(s->pc);
3925 decode_rr(s, insn, &r1, &r2);
3926 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3927 store_reg(r1, tmp);
3928 if (r2) {
3929 tmp2 = load_reg(r2);
3930 tcg_gen_mov_i64(psw_addr, tmp2);
3931 tcg_temp_free_i64(tmp2);
3932 s->is_jmp = DISAS_JUMP;
3934 tcg_temp_free_i64(tmp);
3935 break;
3936 case 0xe: /* MVCL R1,R2 [RR] */
3937 insn = ld_code2(s->pc);
3938 decode_rr(s, insn, &r1, &r2);
3939 tmp32_1 = tcg_const_i32(r1);
3940 tmp32_2 = tcg_const_i32(r2);
3941 potential_page_fault(s);
3942 gen_helper_mvcl(cc_op, tmp32_1, tmp32_2);
3943 set_cc_static(s);
3944 tcg_temp_free_i32(tmp32_1);
3945 tcg_temp_free_i32(tmp32_2);
3946 break;
3947 case 0x10: /* LPR R1,R2 [RR] */
3948 insn = ld_code2(s->pc);
3949 decode_rr(s, insn, &r1, &r2);
3950 tmp32_1 = load_reg32(r2);
3951 set_cc_abs32(s, tmp32_1);
3952 gen_helper_abs_i32(tmp32_1, tmp32_1);
3953 store_reg32(r1, tmp32_1);
3954 tcg_temp_free_i32(tmp32_1);
3955 break;
3956 case 0x11: /* LNR R1,R2 [RR] */
3957 insn = ld_code2(s->pc);
3958 decode_rr(s, insn, &r1, &r2);
3959 tmp32_1 = load_reg32(r2);
3960 set_cc_nabs32(s, tmp32_1);
3961 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3962 store_reg32(r1, tmp32_1);
3963 tcg_temp_free_i32(tmp32_1);
3964 break;
3965 case 0x12: /* LTR R1,R2 [RR] */
3966 insn = ld_code2(s->pc);
3967 decode_rr(s, insn, &r1, &r2);
3968 tmp32_1 = load_reg32(r2);
3969 if (r1 != r2) {
3970 store_reg32(r1, tmp32_1);
3972 set_cc_s32(s, tmp32_1);
3973 tcg_temp_free_i32(tmp32_1);
3974 break;
3975 case 0x13: /* LCR R1,R2 [RR] */
3976 insn = ld_code2(s->pc);
3977 decode_rr(s, insn, &r1, &r2);
3978 tmp32_1 = load_reg32(r2);
3979 tcg_gen_neg_i32(tmp32_1, tmp32_1);
3980 store_reg32(r1, tmp32_1);
3981 set_cc_comp32(s, tmp32_1);
3982 tcg_temp_free_i32(tmp32_1);
3983 break;
3984 case 0x14: /* NR R1,R2 [RR] */
3985 case 0x16: /* OR R1,R2 [RR] */
3986 case 0x17: /* XR R1,R2 [RR] */
3987 insn = ld_code2(s->pc);
3988 decode_rr(s, insn, &r1, &r2);
3989 tmp32_2 = load_reg32(r2);
3990 tmp32_1 = load_reg32(r1);
3991 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3992 store_reg32(r1, tmp32_1);
3993 set_cc_nz_u32(s, tmp32_1);
3994 tcg_temp_free_i32(tmp32_1);
3995 tcg_temp_free_i32(tmp32_2);
3996 break;
3997 case 0x18: /* LR R1,R2 [RR] */
3998 insn = ld_code2(s->pc);
3999 decode_rr(s, insn, &r1, &r2);
4000 tmp32_1 = load_reg32(r2);
4001 store_reg32(r1, tmp32_1);
4002 tcg_temp_free_i32(tmp32_1);
4003 break;
4004 case 0x15: /* CLR R1,R2 [RR] */
4005 case 0x19: /* CR R1,R2 [RR] */
4006 insn = ld_code2(s->pc);
4007 decode_rr(s, insn, &r1, &r2);
4008 tmp32_1 = load_reg32(r1);
4009 tmp32_2 = load_reg32(r2);
4010 if (opc == 0x15) {
4011 cmp_u32(s, tmp32_1, tmp32_2);
4012 } else {
4013 cmp_s32(s, tmp32_1, tmp32_2);
4015 tcg_temp_free_i32(tmp32_1);
4016 tcg_temp_free_i32(tmp32_2);
4017 break;
4018 case 0x1a: /* AR R1,R2 [RR] */
4019 case 0x1e: /* ALR R1,R2 [RR] */
4020 insn = ld_code2(s->pc);
4021 decode_rr(s, insn, &r1, &r2);
4022 tmp32_1 = load_reg32(r1);
4023 tmp32_2 = load_reg32(r2);
4024 tmp32_3 = tcg_temp_new_i32();
4025 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4026 store_reg32(r1, tmp32_3);
4027 if (opc == 0x1a) {
4028 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4029 } else {
4030 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4032 tcg_temp_free_i32(tmp32_1);
4033 tcg_temp_free_i32(tmp32_2);
4034 tcg_temp_free_i32(tmp32_3);
4035 break;
4036 case 0x1b: /* SR R1,R2 [RR] */
4037 case 0x1f: /* SLR R1,R2 [RR] */
4038 insn = ld_code2(s->pc);
4039 decode_rr(s, insn, &r1, &r2);
4040 tmp32_1 = load_reg32(r1);
4041 tmp32_2 = load_reg32(r2);
4042 tmp32_3 = tcg_temp_new_i32();
4043 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4044 store_reg32(r1, tmp32_3);
4045 if (opc == 0x1b) {
4046 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4047 } else {
4048 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4050 tcg_temp_free_i32(tmp32_1);
4051 tcg_temp_free_i32(tmp32_2);
4052 tcg_temp_free_i32(tmp32_3);
4053 break;
4054 case 0x1c: /* MR R1,R2 [RR] */
4055 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4056 insn = ld_code2(s->pc);
4057 decode_rr(s, insn, &r1, &r2);
4058 tmp2 = load_reg(r2);
4059 tmp3 = load_reg((r1 + 1) & 15);
4060 tcg_gen_ext32s_i64(tmp2, tmp2);
4061 tcg_gen_ext32s_i64(tmp3, tmp3);
4062 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4063 store_reg32_i64((r1 + 1) & 15, tmp2);
4064 tcg_gen_shri_i64(tmp2, tmp2, 32);
4065 store_reg32_i64(r1, tmp2);
4066 tcg_temp_free_i64(tmp2);
4067 tcg_temp_free_i64(tmp3);
4068 break;
4069 case 0x1d: /* DR R1,R2 [RR] */
4070 insn = ld_code2(s->pc);
4071 decode_rr(s, insn, &r1, &r2);
4072 tmp32_1 = load_reg32(r1);
4073 tmp32_2 = load_reg32(r1 + 1);
4074 tmp32_3 = load_reg32(r2);
4076 tmp = tcg_temp_new_i64(); /* dividend */
4077 tmp2 = tcg_temp_new_i64(); /* divisor */
4078 tmp3 = tcg_temp_new_i64();
4080 /* dividend is r(r1 << 32) | r(r1 + 1) */
4081 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4082 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4083 tcg_gen_shli_i64(tmp, tmp, 32);
4084 tcg_gen_or_i64(tmp, tmp, tmp2);
4086 /* divisor is r(r2) */
4087 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
4089 tcg_gen_div_i64(tmp3, tmp, tmp2);
4090 tcg_gen_rem_i64(tmp, tmp, tmp2);
4092 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4093 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4095 store_reg32(r1, tmp32_1); /* remainder */
4096 store_reg32(r1 + 1, tmp32_2); /* quotient */
4097 tcg_temp_free_i32(tmp32_1);
4098 tcg_temp_free_i32(tmp32_2);
4099 tcg_temp_free_i32(tmp32_3);
4100 tcg_temp_free_i64(tmp);
4101 tcg_temp_free_i64(tmp2);
4102 tcg_temp_free_i64(tmp3);
4103 break;
4104 case 0x28: /* LDR R1,R2 [RR] */
4105 insn = ld_code2(s->pc);
4106 decode_rr(s, insn, &r1, &r2);
4107 tmp = load_freg(r2);
4108 store_freg(r1, tmp);
4109 tcg_temp_free_i64(tmp);
4110 break;
4111 case 0x38: /* LER R1,R2 [RR] */
4112 insn = ld_code2(s->pc);
4113 decode_rr(s, insn, &r1, &r2);
4114 tmp32_1 = load_freg32(r2);
4115 store_freg32(r1, tmp32_1);
4116 tcg_temp_free_i32(tmp32_1);
4117 break;
4118 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4119 insn = ld_code4(s->pc);
4120 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4121 tmp2 = load_reg(r1);
4122 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
4123 tcg_temp_free_i64(tmp);
4124 tcg_temp_free_i64(tmp2);
4125 break;
4126 case 0x41: /* la */
4127 insn = ld_code4(s->pc);
4128 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4129 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
4130 tcg_temp_free_i64(tmp);
4131 break;
4132 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4133 insn = ld_code4(s->pc);
4134 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4135 tmp2 = load_reg(r1);
4136 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4137 tcg_temp_free_i64(tmp);
4138 tcg_temp_free_i64(tmp2);
4139 break;
4140 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4141 insn = ld_code4(s->pc);
4142 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4143 tmp2 = tcg_temp_new_i64();
4144 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4145 store_reg8(r1, tmp2);
4146 tcg_temp_free_i64(tmp);
4147 tcg_temp_free_i64(tmp2);
4148 break;
4149 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4150 insn = ld_code4(s->pc);
4151 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4152 tmp2 = load_reg(r1);
4153 tmp3 = tcg_const_i64(s->pc + 4);
4154 update_psw_addr(s);
4155 gen_op_calc_cc(s);
4156 gen_helper_ex(cc_op, cc_op, tmp2, tmp, tmp3);
4157 set_cc_static(s);
4158 tcg_temp_free_i64(tmp);
4159 tcg_temp_free_i64(tmp2);
4160 tcg_temp_free_i64(tmp3);
4161 break;
4162 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4163 insn = ld_code4(s->pc);
4164 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4165 tcg_temp_free_i64(tmp);
4167 tmp32_1 = load_reg32(r1);
4168 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
4169 store_reg32(r1, tmp32_1);
4171 gen_update_cc_op(s);
4172 l1 = gen_new_label();
4173 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
4175 /* not taking the branch, jump to after the instruction */
4176 gen_goto_tb(s, 0, s->pc + 4);
4177 gen_set_label(l1);
4179 /* take the branch, move R2 into psw.addr */
4180 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4181 tcg_gen_mov_i64(psw_addr, tmp);
4182 s->is_jmp = DISAS_JUMP;
4183 tcg_temp_free_i32(tmp32_1);
4184 tcg_temp_free_i64(tmp);
4185 break;
4186 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4187 insn = ld_code4(s->pc);
4188 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4189 gen_bcr(s, r1, tmp, s->pc + 4);
4190 tcg_temp_free_i64(tmp);
4191 s->is_jmp = DISAS_TB_JUMP;
4192 break;
4193 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4194 insn = ld_code4(s->pc);
4195 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4196 tmp2 = tcg_temp_new_i64();
4197 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4198 store_reg32_i64(r1, tmp2);
4199 tcg_temp_free_i64(tmp);
4200 tcg_temp_free_i64(tmp2);
4201 break;
4202 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4203 insn = ld_code4(s->pc);
4204 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4205 tmp32_1 = load_reg32(r1);
4206 tmp32_2 = tcg_temp_new_i32();
4207 tmp2 = tcg_temp_new_i64();
4208 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4209 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4210 cmp_s32(s, tmp32_1, tmp32_2);
4211 tcg_temp_free_i32(tmp32_1);
4212 tcg_temp_free_i32(tmp32_2);
4213 tcg_temp_free_i64(tmp);
4214 tcg_temp_free_i64(tmp2);
4215 break;
4216 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4217 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4218 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4219 insn = ld_code4(s->pc);
4220 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4221 tmp2 = tcg_temp_new_i64();
4222 tmp32_1 = load_reg32(r1);
4223 tmp32_2 = tcg_temp_new_i32();
4224 tmp32_3 = tcg_temp_new_i32();
4226 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4227 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4228 switch (opc) {
4229 case 0x4a:
4230 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4231 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4232 break;
4233 case 0x4b:
4234 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4235 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4236 break;
4237 case 0x4c:
4238 tcg_gen_mul_i32(tmp32_3, tmp32_1, tmp32_2);
4239 break;
4240 default:
4241 tcg_abort();
4243 store_reg32(r1, tmp32_3);
4245 tcg_temp_free_i32(tmp32_1);
4246 tcg_temp_free_i32(tmp32_2);
4247 tcg_temp_free_i32(tmp32_3);
4248 tcg_temp_free_i64(tmp);
4249 tcg_temp_free_i64(tmp2);
4250 break;
4251 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4252 insn = ld_code4(s->pc);
4253 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4254 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
4255 store_reg(r1, tmp2);
4256 tcg_gen_mov_i64(psw_addr, tmp);
4257 tcg_temp_free_i64(tmp);
4258 tcg_temp_free_i64(tmp2);
4259 s->is_jmp = DISAS_JUMP;
4260 break;
4261 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4262 insn = ld_code4(s->pc);
4263 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4264 tmp2 = tcg_temp_new_i64();
4265 tmp32_1 = tcg_temp_new_i32();
4266 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
4267 gen_helper_cvd(tmp2, tmp32_1);
4268 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4269 tcg_temp_free_i64(tmp);
4270 tcg_temp_free_i64(tmp2);
4271 tcg_temp_free_i32(tmp32_1);
4272 break;
4273 case 0x50: /* st r1, d2(x2, b2) */
4274 insn = ld_code4(s->pc);
4275 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4276 tmp2 = load_reg(r1);
4277 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4278 tcg_temp_free_i64(tmp);
4279 tcg_temp_free_i64(tmp2);
4280 break;
4281 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4282 insn = ld_code4(s->pc);
4283 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4284 tmp2 = tcg_temp_new_i64();
4285 tmp32_1 = tcg_temp_new_i32();
4286 tmp32_2 = load_reg32(r1);
4287 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4288 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4289 cmp_u32(s, tmp32_2, tmp32_1);
4290 tcg_temp_free_i64(tmp);
4291 tcg_temp_free_i64(tmp2);
4292 tcg_temp_free_i32(tmp32_1);
4293 tcg_temp_free_i32(tmp32_2);
4294 break;
4295 case 0x54: /* N R1,D2(X2,B2) [RX] */
4296 case 0x56: /* O R1,D2(X2,B2) [RX] */
4297 case 0x57: /* X R1,D2(X2,B2) [RX] */
4298 insn = ld_code4(s->pc);
4299 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4300 tmp2 = tcg_temp_new_i64();
4301 tmp32_1 = load_reg32(r1);
4302 tmp32_2 = tcg_temp_new_i32();
4303 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4304 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4305 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4306 store_reg32(r1, tmp32_1);
4307 set_cc_nz_u32(s, tmp32_1);
4308 tcg_temp_free_i64(tmp);
4309 tcg_temp_free_i64(tmp2);
4310 tcg_temp_free_i32(tmp32_1);
4311 tcg_temp_free_i32(tmp32_2);
4312 break;
4313 case 0x58: /* l r1, d2(x2, b2) */
4314 insn = ld_code4(s->pc);
4315 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4316 tmp2 = tcg_temp_new_i64();
4317 tmp32_1 = tcg_temp_new_i32();
4318 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4319 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4320 store_reg32(r1, tmp32_1);
4321 tcg_temp_free_i64(tmp);
4322 tcg_temp_free_i64(tmp2);
4323 tcg_temp_free_i32(tmp32_1);
4324 break;
4325 case 0x59: /* C R1,D2(X2,B2) [RX] */
4326 insn = ld_code4(s->pc);
4327 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4328 tmp2 = tcg_temp_new_i64();
4329 tmp32_1 = tcg_temp_new_i32();
4330 tmp32_2 = load_reg32(r1);
4331 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4332 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4333 cmp_s32(s, tmp32_2, tmp32_1);
4334 tcg_temp_free_i64(tmp);
4335 tcg_temp_free_i64(tmp2);
4336 tcg_temp_free_i32(tmp32_1);
4337 tcg_temp_free_i32(tmp32_2);
4338 break;
4339 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4340 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4341 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4342 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4343 insn = ld_code4(s->pc);
4344 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4345 tmp32_1 = load_reg32(r1);
4346 tmp32_2 = tcg_temp_new_i32();
4347 tmp32_3 = tcg_temp_new_i32();
4348 tcg_gen_qemu_ld32s(tmp, tmp, get_mem_index(s));
4349 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4350 switch (opc) {
4351 case 0x5a:
4352 case 0x5e:
4353 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4354 break;
4355 case 0x5b:
4356 case 0x5f:
4357 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4358 break;
4359 default:
4360 tcg_abort();
4362 store_reg32(r1, tmp32_3);
4363 switch (opc) {
4364 case 0x5a:
4365 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4366 break;
4367 case 0x5e:
4368 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4369 break;
4370 case 0x5b:
4371 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4372 break;
4373 case 0x5f:
4374 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4375 break;
4376 default:
4377 tcg_abort();
4379 tcg_temp_free_i64(tmp);
4380 tcg_temp_free_i32(tmp32_1);
4381 tcg_temp_free_i32(tmp32_2);
4382 tcg_temp_free_i32(tmp32_3);
4383 break;
4384 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4385 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4386 insn = ld_code4(s->pc);
4387 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4388 tmp2 = tcg_temp_new_i64();
4389 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4390 tmp3 = load_reg((r1 + 1) & 15);
4391 tcg_gen_ext32s_i64(tmp2, tmp2);
4392 tcg_gen_ext32s_i64(tmp3, tmp3);
4393 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4394 store_reg32_i64((r1 + 1) & 15, tmp2);
4395 tcg_gen_shri_i64(tmp2, tmp2, 32);
4396 store_reg32_i64(r1, tmp2);
4397 tcg_temp_free_i64(tmp);
4398 tcg_temp_free_i64(tmp2);
4399 tcg_temp_free_i64(tmp3);
4400 break;
4401 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4402 insn = ld_code4(s->pc);
4403 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4404 tmp32_1 = load_reg32(r1);
4405 tmp32_2 = load_reg32(r1 + 1);
4407 tmp = tcg_temp_new_i64();
4408 tmp2 = tcg_temp_new_i64();
4410 /* dividend is r(r1 << 32) | r(r1 + 1) */
4411 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4412 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4413 tcg_gen_shli_i64(tmp, tmp, 32);
4414 tcg_gen_or_i64(tmp, tmp, tmp2);
4416 /* divisor is in memory */
4417 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
4419 /* XXX divisor == 0 -> FixP divide exception */
4421 tcg_gen_div_i64(tmp3, tmp, tmp2);
4422 tcg_gen_rem_i64(tmp, tmp, tmp2);
4424 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4425 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4427 store_reg32(r1, tmp32_1); /* remainder */
4428 store_reg32(r1 + 1, tmp32_2); /* quotient */
4429 tcg_temp_free_i32(tmp32_1);
4430 tcg_temp_free_i32(tmp32_2);
4431 tcg_temp_free_i64(tmp);
4432 tcg_temp_free_i64(tmp2);
4433 tcg_temp_free_i64(tmp3);
4434 break;
4435 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4436 insn = ld_code4(s->pc);
4437 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4438 tmp2 = load_freg(r1);
4439 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4440 tcg_temp_free_i64(tmp);
4441 tcg_temp_free_i64(tmp2);
4442 break;
4443 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4444 insn = ld_code4(s->pc);
4445 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4446 tmp2 = tcg_temp_new_i64();
4447 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
4448 store_freg(r1, tmp2);
4449 tcg_temp_free_i64(tmp);
4450 tcg_temp_free_i64(tmp2);
4451 break;
4452 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4453 insn = ld_code4(s->pc);
4454 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4455 tmp2 = tcg_temp_new_i64();
4456 tmp32_1 = load_freg32(r1);
4457 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
4458 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4459 tcg_temp_free_i64(tmp);
4460 tcg_temp_free_i64(tmp2);
4461 tcg_temp_free_i32(tmp32_1);
4462 break;
4463 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4464 insn = ld_code4(s->pc);
4465 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4466 tmp2 = tcg_temp_new_i64();
4467 tmp32_1 = load_reg32(r1);
4468 tmp32_2 = tcg_temp_new_i32();
4469 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4470 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4471 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
4472 store_reg32(r1, tmp32_1);
4473 tcg_temp_free_i64(tmp);
4474 tcg_temp_free_i64(tmp2);
4475 tcg_temp_free_i32(tmp32_1);
4476 tcg_temp_free_i32(tmp32_2);
4477 break;
4478 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4479 insn = ld_code4(s->pc);
4480 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4481 tmp2 = tcg_temp_new_i64();
4482 tmp32_1 = tcg_temp_new_i32();
4483 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4484 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4485 store_freg32(r1, tmp32_1);
4486 tcg_temp_free_i64(tmp);
4487 tcg_temp_free_i64(tmp2);
4488 tcg_temp_free_i32(tmp32_1);
4489 break;
4490 #ifndef CONFIG_USER_ONLY
4491 case 0x80: /* SSM D2(B2) [S] */
4492 /* Set System Mask */
4493 check_privileged(s, ilc);
4494 insn = ld_code4(s->pc);
4495 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4496 tmp = get_address(s, 0, b2, d2);
4497 tmp2 = tcg_temp_new_i64();
4498 tmp3 = tcg_temp_new_i64();
4499 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
4500 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4501 tcg_gen_shli_i64(tmp2, tmp2, 56);
4502 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
4503 tcg_temp_free_i64(tmp);
4504 tcg_temp_free_i64(tmp2);
4505 tcg_temp_free_i64(tmp3);
4506 break;
4507 case 0x82: /* LPSW D2(B2) [S] */
4508 /* Load PSW */
4509 check_privileged(s, ilc);
4510 insn = ld_code4(s->pc);
4511 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4512 tmp = get_address(s, 0, b2, d2);
4513 tmp2 = tcg_temp_new_i64();
4514 tmp3 = tcg_temp_new_i64();
4515 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4516 tcg_gen_addi_i64(tmp, tmp, 4);
4517 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
4518 gen_helper_load_psw(tmp2, tmp3);
4519 tcg_temp_free_i64(tmp);
4520 tcg_temp_free_i64(tmp2);
4521 tcg_temp_free_i64(tmp3);
4522 /* we need to keep cc_op intact */
4523 s->is_jmp = DISAS_JUMP;
4524 break;
4525 case 0x83: /* DIAG R1,R3,D2 [RS] */
4526 /* Diagnose call (KVM hypercall) */
4527 check_privileged(s, ilc);
4528 potential_page_fault(s);
4529 insn = ld_code4(s->pc);
4530 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4531 tmp32_1 = tcg_const_i32(insn & 0xfff);
4532 tmp2 = load_reg(2);
4533 tmp3 = load_reg(1);
4534 gen_helper_diag(tmp2, tmp32_1, tmp2, tmp3);
4535 store_reg(2, tmp2);
4536 tcg_temp_free_i32(tmp32_1);
4537 tcg_temp_free_i64(tmp2);
4538 tcg_temp_free_i64(tmp3);
4539 break;
4540 #endif
4541 case 0x88: /* SRL R1,D2(B2) [RS] */
4542 case 0x89: /* SLL R1,D2(B2) [RS] */
4543 case 0x8a: /* SRA R1,D2(B2) [RS] */
4544 insn = ld_code4(s->pc);
4545 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4546 tmp = get_address(s, 0, b2, d2);
4547 tmp32_1 = load_reg32(r1);
4548 tmp32_2 = tcg_temp_new_i32();
4549 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4550 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4551 switch (opc) {
4552 case 0x88:
4553 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4554 break;
4555 case 0x89:
4556 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4557 break;
4558 case 0x8a:
4559 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4560 set_cc_s32(s, tmp32_1);
4561 break;
4562 default:
4563 tcg_abort();
4565 store_reg32(r1, tmp32_1);
4566 tcg_temp_free_i64(tmp);
4567 tcg_temp_free_i32(tmp32_1);
4568 tcg_temp_free_i32(tmp32_2);
4569 break;
4570 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4571 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4572 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4573 insn = ld_code4(s->pc);
4574 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4575 tmp = get_address(s, 0, b2, d2); /* shift */
4576 tmp2 = tcg_temp_new_i64();
4577 tmp32_1 = load_reg32(r1);
4578 tmp32_2 = load_reg32(r1 + 1);
4579 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4580 switch (opc) {
4581 case 0x8c:
4582 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4583 break;
4584 case 0x8d:
4585 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4586 break;
4587 case 0x8e:
4588 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4589 set_cc_s64(s, tmp2);
4590 break;
4592 tcg_gen_shri_i64(tmp, tmp2, 32);
4593 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4594 store_reg32(r1, tmp32_1);
4595 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4596 store_reg32(r1 + 1, tmp32_2);
4597 break;
4598 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4599 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4600 insn = ld_code4(s->pc);
4601 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4603 tmp = get_address(s, 0, b2, d2);
4604 tmp2 = tcg_temp_new_i64();
4605 tmp3 = tcg_const_i64(4);
4606 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
4607 for (i = r1;; i = (i + 1) % 16) {
4608 if (opc == 0x98) {
4609 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4610 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4611 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4612 } else {
4613 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4615 if (i == r3) {
4616 break;
4618 tcg_gen_add_i64(tmp, tmp, tmp3);
4620 tcg_temp_free_i64(tmp2);
4621 tcg_temp_free_i64(tmp3);
4622 tcg_temp_free_i64(tmp4);
4623 break;
4624 case 0x91: /* TM D1(B1),I2 [SI] */
4625 insn = ld_code4(s->pc);
4626 tmp = decode_si(s, insn, &i2, &b1, &d1);
4627 tmp2 = tcg_const_i64(i2);
4628 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4629 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4630 tcg_temp_free_i64(tmp);
4631 tcg_temp_free_i64(tmp2);
4632 break;
4633 case 0x92: /* MVI D1(B1),I2 [SI] */
4634 insn = ld_code4(s->pc);
4635 tmp = decode_si(s, insn, &i2, &b1, &d1);
4636 tmp2 = tcg_const_i64(i2);
4637 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4638 tcg_temp_free_i64(tmp);
4639 tcg_temp_free_i64(tmp2);
4640 break;
4641 case 0x94: /* NI D1(B1),I2 [SI] */
4642 case 0x96: /* OI D1(B1),I2 [SI] */
4643 case 0x97: /* XI D1(B1),I2 [SI] */
4644 insn = ld_code4(s->pc);
4645 tmp = decode_si(s, insn, &i2, &b1, &d1);
4646 tmp2 = tcg_temp_new_i64();
4647 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4648 switch (opc) {
4649 case 0x94:
4650 tcg_gen_andi_i64(tmp2, tmp2, i2);
4651 break;
4652 case 0x96:
4653 tcg_gen_ori_i64(tmp2, tmp2, i2);
4654 break;
4655 case 0x97:
4656 tcg_gen_xori_i64(tmp2, tmp2, i2);
4657 break;
4658 default:
4659 tcg_abort();
4661 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4662 set_cc_nz_u64(s, tmp2);
4663 tcg_temp_free_i64(tmp);
4664 tcg_temp_free_i64(tmp2);
4665 break;
4666 case 0x95: /* CLI D1(B1),I2 [SI] */
4667 insn = ld_code4(s->pc);
4668 tmp = decode_si(s, insn, &i2, &b1, &d1);
4669 tmp2 = tcg_temp_new_i64();
4670 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4671 cmp_u64c(s, tmp2, i2);
4672 tcg_temp_free_i64(tmp);
4673 tcg_temp_free_i64(tmp2);
4674 break;
4675 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4676 insn = ld_code4(s->pc);
4677 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4678 tmp = get_address(s, 0, b2, d2);
4679 tmp32_1 = tcg_const_i32(r1);
4680 tmp32_2 = tcg_const_i32(r3);
4681 potential_page_fault(s);
4682 gen_helper_lam(tmp32_1, tmp, tmp32_2);
4683 tcg_temp_free_i64(tmp);
4684 tcg_temp_free_i32(tmp32_1);
4685 tcg_temp_free_i32(tmp32_2);
4686 break;
4687 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4688 insn = ld_code4(s->pc);
4689 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4690 tmp = get_address(s, 0, b2, d2);
4691 tmp32_1 = tcg_const_i32(r1);
4692 tmp32_2 = tcg_const_i32(r3);
4693 potential_page_fault(s);
4694 gen_helper_stam(tmp32_1, tmp, tmp32_2);
4695 tcg_temp_free_i64(tmp);
4696 tcg_temp_free_i32(tmp32_1);
4697 tcg_temp_free_i32(tmp32_2);
4698 break;
4699 case 0xa5:
4700 insn = ld_code4(s->pc);
4701 r1 = (insn >> 20) & 0xf;
4702 op = (insn >> 16) & 0xf;
4703 i2 = insn & 0xffff;
4704 disas_a5(s, op, r1, i2);
4705 break;
4706 case 0xa7:
4707 insn = ld_code4(s->pc);
4708 r1 = (insn >> 20) & 0xf;
4709 op = (insn >> 16) & 0xf;
4710 i2 = (short)insn;
4711 disas_a7(s, op, r1, i2);
4712 break;
4713 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4714 insn = ld_code4(s->pc);
4715 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4716 tmp = get_address(s, 0, b2, d2);
4717 tmp32_1 = tcg_const_i32(r1);
4718 tmp32_2 = tcg_const_i32(r3);
4719 potential_page_fault(s);
4720 gen_helper_mvcle(cc_op, tmp32_1, tmp, tmp32_2);
4721 set_cc_static(s);
4722 tcg_temp_free_i64(tmp);
4723 tcg_temp_free_i32(tmp32_1);
4724 tcg_temp_free_i32(tmp32_2);
4725 break;
4726 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4727 insn = ld_code4(s->pc);
4728 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4729 tmp = get_address(s, 0, b2, d2);
4730 tmp32_1 = tcg_const_i32(r1);
4731 tmp32_2 = tcg_const_i32(r3);
4732 potential_page_fault(s);
4733 gen_helper_clcle(cc_op, tmp32_1, tmp, tmp32_2);
4734 set_cc_static(s);
4735 tcg_temp_free_i64(tmp);
4736 tcg_temp_free_i32(tmp32_1);
4737 tcg_temp_free_i32(tmp32_2);
4738 break;
4739 #ifndef CONFIG_USER_ONLY
4740 case 0xac: /* STNSM D1(B1),I2 [SI] */
4741 case 0xad: /* STOSM D1(B1),I2 [SI] */
4742 check_privileged(s, ilc);
4743 insn = ld_code4(s->pc);
4744 tmp = decode_si(s, insn, &i2, &b1, &d1);
4745 tmp2 = tcg_temp_new_i64();
4746 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4747 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4748 if (opc == 0xac) {
4749 tcg_gen_andi_i64(psw_mask, psw_mask,
4750 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4751 } else {
4752 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4754 tcg_temp_free_i64(tmp);
4755 tcg_temp_free_i64(tmp2);
4756 break;
4757 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4758 check_privileged(s, ilc);
4759 insn = ld_code4(s->pc);
4760 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4761 tmp = get_address(s, 0, b2, d2);
4762 tmp2 = load_reg(r3);
4763 tmp32_1 = tcg_const_i32(r1);
4764 potential_page_fault(s);
4765 gen_helper_sigp(cc_op, tmp, tmp32_1, tmp2);
4766 set_cc_static(s);
4767 tcg_temp_free_i64(tmp);
4768 tcg_temp_free_i64(tmp2);
4769 tcg_temp_free_i32(tmp32_1);
4770 break;
4771 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4772 check_privileged(s, ilc);
4773 insn = ld_code4(s->pc);
4774 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4775 tmp32_1 = tcg_const_i32(r1);
4776 potential_page_fault(s);
4777 gen_helper_lra(cc_op, tmp, tmp32_1);
4778 set_cc_static(s);
4779 tcg_temp_free_i64(tmp);
4780 tcg_temp_free_i32(tmp32_1);
4781 break;
4782 #endif
4783 case 0xb2:
4784 insn = ld_code4(s->pc);
4785 op = (insn >> 16) & 0xff;
4786 switch (op) {
4787 case 0x9c: /* STFPC D2(B2) [S] */
4788 d2 = insn & 0xfff;
4789 b2 = (insn >> 12) & 0xf;
4790 tmp32_1 = tcg_temp_new_i32();
4791 tmp = tcg_temp_new_i64();
4792 tmp2 = get_address(s, 0, b2, d2);
4793 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
4794 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4795 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4796 tcg_temp_free_i32(tmp32_1);
4797 tcg_temp_free_i64(tmp);
4798 tcg_temp_free_i64(tmp2);
4799 break;
4800 default:
4801 disas_b2(s, op, insn);
4802 break;
4804 break;
4805 case 0xb3:
4806 insn = ld_code4(s->pc);
4807 op = (insn >> 16) & 0xff;
4808 r3 = (insn >> 12) & 0xf; /* aka m3 */
4809 r1 = (insn >> 4) & 0xf;
4810 r2 = insn & 0xf;
4811 disas_b3(s, op, r3, r1, r2);
4812 break;
4813 #ifndef CONFIG_USER_ONLY
4814 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4815 /* Store Control */
4816 check_privileged(s, ilc);
4817 insn = ld_code4(s->pc);
4818 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4819 tmp = get_address(s, 0, b2, d2);
4820 tmp32_1 = tcg_const_i32(r1);
4821 tmp32_2 = tcg_const_i32(r3);
4822 potential_page_fault(s);
4823 gen_helper_stctl(tmp32_1, tmp, tmp32_2);
4824 tcg_temp_free_i64(tmp);
4825 tcg_temp_free_i32(tmp32_1);
4826 tcg_temp_free_i32(tmp32_2);
4827 break;
4828 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4829 /* Load Control */
4830 check_privileged(s, ilc);
4831 insn = ld_code4(s->pc);
4832 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4833 tmp = get_address(s, 0, b2, d2);
4834 tmp32_1 = tcg_const_i32(r1);
4835 tmp32_2 = tcg_const_i32(r3);
4836 potential_page_fault(s);
4837 gen_helper_lctl(tmp32_1, tmp, tmp32_2);
4838 tcg_temp_free_i64(tmp);
4839 tcg_temp_free_i32(tmp32_1);
4840 tcg_temp_free_i32(tmp32_2);
4841 break;
4842 #endif
4843 case 0xb9:
4844 insn = ld_code4(s->pc);
4845 r1 = (insn >> 4) & 0xf;
4846 r2 = insn & 0xf;
4847 op = (insn >> 16) & 0xff;
4848 disas_b9(s, op, r1, r2);
4849 break;
4850 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4851 insn = ld_code4(s->pc);
4852 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4853 tmp = get_address(s, 0, b2, d2);
4854 tmp32_1 = tcg_const_i32(r1);
4855 tmp32_2 = tcg_const_i32(r3);
4856 potential_page_fault(s);
4857 gen_helper_cs(cc_op, tmp32_1, tmp, tmp32_2);
4858 set_cc_static(s);
4859 tcg_temp_free_i64(tmp);
4860 tcg_temp_free_i32(tmp32_1);
4861 tcg_temp_free_i32(tmp32_2);
4862 break;
4863 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4864 insn = ld_code4(s->pc);
4865 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4866 tmp = get_address(s, 0, b2, d2);
4867 tmp32_1 = load_reg32(r1);
4868 tmp32_2 = tcg_const_i32(r3);
4869 potential_page_fault(s);
4870 gen_helper_clm(cc_op, tmp32_1, tmp32_2, tmp);
4871 set_cc_static(s);
4872 tcg_temp_free_i64(tmp);
4873 tcg_temp_free_i32(tmp32_1);
4874 tcg_temp_free_i32(tmp32_2);
4875 break;
4876 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4877 insn = ld_code4(s->pc);
4878 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4879 tmp = get_address(s, 0, b2, d2);
4880 tmp32_1 = load_reg32(r1);
4881 tmp32_2 = tcg_const_i32(r3);
4882 potential_page_fault(s);
4883 gen_helper_stcm(tmp32_1, tmp32_2, tmp);
4884 tcg_temp_free_i64(tmp);
4885 tcg_temp_free_i32(tmp32_1);
4886 tcg_temp_free_i32(tmp32_2);
4887 break;
4888 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4889 insn = ld_code4(s->pc);
4890 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4891 if (r3 == 15) {
4892 /* effectively a 32-bit load */
4893 tmp = get_address(s, 0, b2, d2);
4894 tmp32_1 = tcg_temp_new_i32();
4895 tmp32_2 = tcg_const_i32(r3);
4896 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4897 store_reg32_i64(r1, tmp);
4898 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4899 set_cc_icm(s, tmp32_2, tmp32_1);
4900 tcg_temp_free_i64(tmp);
4901 tcg_temp_free_i32(tmp32_1);
4902 tcg_temp_free_i32(tmp32_2);
4903 } else if (r3) {
4904 uint32_t mask = 0x00ffffffUL;
4905 uint32_t shift = 24;
4906 int m3 = r3;
4907 tmp = get_address(s, 0, b2, d2);
4908 tmp2 = tcg_temp_new_i64();
4909 tmp32_1 = load_reg32(r1);
4910 tmp32_2 = tcg_temp_new_i32();
4911 tmp32_3 = tcg_const_i32(r3);
4912 tmp32_4 = tcg_const_i32(0);
4913 while (m3) {
4914 if (m3 & 8) {
4915 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4916 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4917 if (shift) {
4918 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4920 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4921 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4922 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4923 tcg_gen_addi_i64(tmp, tmp, 1);
4925 m3 = (m3 << 1) & 0xf;
4926 mask = (mask >> 8) | 0xff000000UL;
4927 shift -= 8;
4929 store_reg32(r1, tmp32_1);
4930 set_cc_icm(s, tmp32_3, tmp32_4);
4931 tcg_temp_free_i64(tmp);
4932 tcg_temp_free_i64(tmp2);
4933 tcg_temp_free_i32(tmp32_1);
4934 tcg_temp_free_i32(tmp32_2);
4935 tcg_temp_free_i32(tmp32_3);
4936 tcg_temp_free_i32(tmp32_4);
4937 } else {
4938 /* i.e. env->cc = 0 */
4939 gen_op_movi_cc(s, 0);
4941 break;
4942 case 0xc0:
4943 case 0xc2:
4944 insn = ld_code6(s->pc);
4945 r1 = (insn >> 36) & 0xf;
4946 op = (insn >> 32) & 0xf;
4947 i2 = (int)insn;
4948 switch (opc) {
4949 case 0xc0:
4950 disas_c0(s, op, r1, i2);
4951 break;
4952 case 0xc2:
4953 disas_c2(s, op, r1, i2);
4954 break;
4955 default:
4956 tcg_abort();
4958 break;
4959 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4960 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4961 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4962 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4963 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4964 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4965 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4966 insn = ld_code6(s->pc);
4967 vl = tcg_const_i32((insn >> 32) & 0xff);
4968 b1 = (insn >> 28) & 0xf;
4969 b2 = (insn >> 12) & 0xf;
4970 d1 = (insn >> 16) & 0xfff;
4971 d2 = insn & 0xfff;
4972 tmp = get_address(s, 0, b1, d1);
4973 tmp2 = get_address(s, 0, b2, d2);
4974 switch (opc) {
4975 case 0xd2:
4976 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
4977 break;
4978 case 0xd4:
4979 potential_page_fault(s);
4980 gen_helper_nc(cc_op, vl, tmp, tmp2);
4981 set_cc_static(s);
4982 break;
4983 case 0xd5:
4984 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
4985 break;
4986 case 0xd6:
4987 potential_page_fault(s);
4988 gen_helper_oc(cc_op, vl, tmp, tmp2);
4989 set_cc_static(s);
4990 break;
4991 case 0xd7:
4992 potential_page_fault(s);
4993 gen_helper_xc(cc_op, vl, tmp, tmp2);
4994 set_cc_static(s);
4995 break;
4996 case 0xdc:
4997 potential_page_fault(s);
4998 gen_helper_tr(vl, tmp, tmp2);
4999 set_cc_static(s);
5000 break;
5001 case 0xf3:
5002 potential_page_fault(s);
5003 gen_helper_unpk(vl, tmp, tmp2);
5004 break;
5005 default:
5006 tcg_abort();
5008 tcg_temp_free_i64(tmp);
5009 tcg_temp_free_i64(tmp2);
5010 break;
5011 #ifndef CONFIG_USER_ONLY
5012 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5013 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5014 check_privileged(s, ilc);
5015 potential_page_fault(s);
5016 insn = ld_code6(s->pc);
5017 r1 = (insn >> 36) & 0xf;
5018 r3 = (insn >> 32) & 0xf;
5019 b1 = (insn >> 28) & 0xf;
5020 d1 = (insn >> 16) & 0xfff;
5021 b2 = (insn >> 12) & 0xf;
5022 d2 = insn & 0xfff;
5023 tmp = load_reg(r1);
5024 /* XXX key in r3 */
5025 tmp2 = get_address(s, 0, b1, d1);
5026 tmp3 = get_address(s, 0, b2, d2);
5027 if (opc == 0xda) {
5028 gen_helper_mvcp(cc_op, tmp, tmp2, tmp3);
5029 } else {
5030 gen_helper_mvcs(cc_op, tmp, tmp2, tmp3);
5032 set_cc_static(s);
5033 tcg_temp_free_i64(tmp);
5034 tcg_temp_free_i64(tmp2);
5035 tcg_temp_free_i64(tmp3);
5036 break;
5037 #endif
5038 case 0xe3:
5039 insn = ld_code6(s->pc);
5040 debug_insn(insn);
5041 op = insn & 0xff;
5042 r1 = (insn >> 36) & 0xf;
5043 x2 = (insn >> 32) & 0xf;
5044 b2 = (insn >> 28) & 0xf;
5045 d2 = ((int)((((insn >> 16) & 0xfff)
5046 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5047 disas_e3(s, op, r1, x2, b2, d2 );
5048 break;
5049 #ifndef CONFIG_USER_ONLY
5050 case 0xe5:
5051 /* Test Protection */
5052 check_privileged(s, ilc);
5053 insn = ld_code6(s->pc);
5054 debug_insn(insn);
5055 disas_e5(s, insn);
5056 break;
5057 #endif
5058 case 0xeb:
5059 insn = ld_code6(s->pc);
5060 debug_insn(insn);
5061 op = insn & 0xff;
5062 r1 = (insn >> 36) & 0xf;
5063 r3 = (insn >> 32) & 0xf;
5064 b2 = (insn >> 28) & 0xf;
5065 d2 = ((int)((((insn >> 16) & 0xfff)
5066 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5067 disas_eb(s, op, r1, r3, b2, d2);
5068 break;
5069 case 0xed:
5070 insn = ld_code6(s->pc);
5071 debug_insn(insn);
5072 op = insn & 0xff;
5073 r1 = (insn >> 36) & 0xf;
5074 x2 = (insn >> 32) & 0xf;
5075 b2 = (insn >> 28) & 0xf;
5076 d2 = (short)((insn >> 16) & 0xfff);
5077 r1b = (insn >> 12) & 0xf;
5078 disas_ed(s, op, r1, x2, b2, d2, r1b);
5079 break;
5080 default:
5081 LOG_DISAS("unimplemented opcode 0x%x\n", opc);
5082 gen_illegal_opcode(s, ilc);
5083 break;
5086 /* Instruction length is encoded in the opcode */
5087 s->pc += (ilc * 2);
5090 static inline void gen_intermediate_code_internal(CPUState *env,
5091 TranslationBlock *tb,
5092 int search_pc)
5094 DisasContext dc;
5095 target_ulong pc_start;
5096 uint64_t next_page_start;
5097 uint16_t *gen_opc_end;
5098 int j, lj = -1;
5099 int num_insns, max_insns;
5100 CPUBreakpoint *bp;
5102 pc_start = tb->pc;
5104 /* 31-bit mode */
5105 if (!(tb->flags & FLAG_MASK_64)) {
5106 pc_start &= 0x7fffffff;
5109 dc.pc = pc_start;
5110 dc.is_jmp = DISAS_NEXT;
5111 dc.tb = tb;
5112 dc.cc_op = CC_OP_DYNAMIC;
5114 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5116 next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
5118 num_insns = 0;
5119 max_insns = tb->cflags & CF_COUNT_MASK;
5120 if (max_insns == 0) {
5121 max_insns = CF_COUNT_MASK;
5124 gen_icount_start();
5126 do {
5127 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5128 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5129 if (bp->pc == dc.pc) {
5130 gen_debug(&dc);
5131 break;
5135 if (search_pc) {
5136 j = gen_opc_ptr - gen_opc_buf;
5137 if (lj < j) {
5138 lj++;
5139 while (lj < j) {
5140 gen_opc_instr_start[lj++] = 0;
5143 gen_opc_pc[lj] = dc.pc;
5144 gen_opc_cc_op[lj] = dc.cc_op;
5145 gen_opc_instr_start[lj] = 1;
5146 gen_opc_icount[lj] = num_insns;
5148 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
5149 gen_io_start();
5151 #if defined(S390X_DEBUG_DISAS_VERBOSE)
5152 LOG_DISAS("pc " TARGET_FMT_lx "\n",
5153 dc.pc);
5154 #endif
5155 disas_s390_insn(&dc);
5157 num_insns++;
5158 if (env->singlestep_enabled) {
5159 gen_debug(&dc);
5161 } while (!dc.is_jmp && gen_opc_ptr < gen_opc_end && dc.pc < next_page_start
5162 && num_insns < max_insns && !env->singlestep_enabled
5163 && !singlestep);
5165 if (!dc.is_jmp) {
5166 update_psw_addr(&dc);
5169 if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
5170 gen_op_calc_cc(&dc);
5171 } else {
5172 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5173 is in env */
5174 gen_op_set_cc_op(&dc);
5177 if (tb->cflags & CF_LAST_IO) {
5178 gen_io_end();
5180 /* Generate the return instruction */
5181 if (dc.is_jmp != DISAS_TB_JUMP) {
5182 tcg_gen_exit_tb(0);
5184 gen_icount_end(tb, num_insns);
5185 *gen_opc_ptr = INDEX_op_end;
5186 if (search_pc) {
5187 j = gen_opc_ptr - gen_opc_buf;
5188 lj++;
5189 while (lj <= j) {
5190 gen_opc_instr_start[lj++] = 0;
5192 } else {
5193 tb->size = dc.pc - pc_start;
5194 tb->icount = num_insns;
5196 #if defined(S390X_DEBUG_DISAS)
5197 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
5198 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5199 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5200 log_target_disas(pc_start, dc.pc - pc_start, 1);
5201 qemu_log("\n");
5203 #endif
5206 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
5208 gen_intermediate_code_internal(env, tb, 0);
5211 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
5213 gen_intermediate_code_internal(env, tb, 1);
5216 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5218 int cc_op;
5219 env->psw.addr = gen_opc_pc[pc_pos];
5220 cc_op = gen_opc_cc_op[pc_pos];
5221 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
5222 env->cc_op = cc_op;