Can't boot from the u-boot.bin file in current directory
[qemu/mini2440.git] / target-cris / translate.c
blob6a442813ab6c320dd29fb556cdc23aafe71f9c2c
1 /*
2 * CRIS emulation for qemu: main translation routines.
4 * Copyright (c) 2008 AXIS Communications AB
5 * Written by Edgar E. Iglesias.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
23 * FIXME:
24 * The condition code translation is in need of attention.
27 #include <stdarg.h>
28 #include <stdlib.h>
29 #include <stdio.h>
30 #include <string.h>
31 #include <inttypes.h>
33 #include "cpu.h"
34 #include "exec-all.h"
35 #include "disas.h"
36 #include "tcg-op.h"
37 #include "helper.h"
38 #include "mmu.h"
39 #include "crisv32-decode.h"
40 #include "qemu-common.h"
42 #define GEN_HELPER 1
43 #include "helper.h"
/* Set to 1 to compile in disassembly logging of translated insns.  */
#define DISAS_CRIS 0
#if DISAS_CRIS
# define LOG_DIS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DIS(...) do { } while (0)
#endif

/* Debug print stub, compiled out.  */
#define D(x)
/* Report an internal translator bug at the current pc (aborts).  */
#define BUG() (gen_BUG(dc, __FILE__, __LINE__))
/* NOTE: GCC statement-expression; only valid on GCC-compatible compilers.  */
#define BUG_ON(x) ({if (x) BUG();})

/* is_jmp value used when a break/swi insn ends the TB.  */
#define DISAS_SWI 5

/* Used by the decoder: extract bits [start..end] (inclusive) of src.  */
#define EXTRACT_FIELD(src, start, end) \
            (((src) >> start) & ((1 << (end - start + 1)) - 1))

/* Groups of CCS flag bits an insn class may update (N/Z/V/C, plus R).  */
#define CC_MASK_NZ 0xc
#define CC_MASK_NZV 0xe
#define CC_MASK_NZVC 0xf
#define CC_MASK_RNZV 0x10e
/* TCG globals backing the guest CPU state.  */
static TCGv_ptr cpu_env;
static TCGv cpu_R[16];          /* General registers $r0..$acr.  */
static TCGv cpu_PR[16];         /* Special registers $bz..$spc.  */
/* Lazy condition-code evaluation state (mirrors fields in CPUState).  */
static TCGv cc_x;
static TCGv cc_src;
static TCGv cc_dest;
static TCGv cc_result;
static TCGv cc_op;
static TCGv cc_size;
static TCGv cc_mask;
/* Branch/delay-slot state.  */
static TCGv env_btaken;
static TCGv env_btarget;
static TCGv env_pc;
82 #include "gen-icount.h"
/* This is the state at translation time.  */
typedef struct DisasContext {
    CPUState *env;
    target_ulong pc, ppc;       /* Current and previous insn address.  */

    /* Decoder.  */
    uint32_t ir;                /* Raw instruction word.  */
    uint32_t opcode;
    unsigned int op1;
    unsigned int op2;
    unsigned int zsize, zzsize; /* Operand-size fields from the insn.  */
    unsigned int mode;
    unsigned int postinc;       /* Post-increment addressing in effect.  */

    int update_cc;              /* Should this insn update the flags?  */
    int cc_op;
    int cc_size;
    uint32_t cc_mask;

    int cc_size_uptodate; /* -1 invalid or last written value.  */

    int cc_x_uptodate; /* 1 - ccs, 2 - known | X_FLAG. 0 not uptodate.  */
    int flags_uptodate; /* Whether or not $ccs is uptodate.  */
    int flagx_known; /* Whether or not flags_x has the x flag known at
                        translation time.  */
    int flags_x;

    int clear_x; /* Clear x after this insn?  */
    int cpustate_changed;
    unsigned int tb_flags; /* tb dependent flags.  */
    int is_jmp;

#define JMP_NOJMP 0
#define JMP_DIRECT 1
#define JMP_INDIRECT 2
    int jmp; /* 0=nojmp, 1=direct, 2=indirect.  */
    uint32_t jmp_pc;

    int delayed_branch;

    struct TranslationBlock *tb;
    int singlestep_enabled;
} DisasContext;
/* Report an internal translator bug at the current guest pc and abort.
   Never returns (cpu_abort does not return).  */
static void gen_BUG(DisasContext *dc, const char *file, int line)
{
    printf ("BUG: pc=%x %s %d\n", dc->pc, file, line);
    qemu_log("BUG: pc=%x %s %d\n", dc->pc, file, line);
    cpu_abort(dc->env, "%s:%d\n", file, line);
}
/* Debug names for the 16 general registers (used for the TCG globals).  */
static const char *regnames[] =
{
    "$r0", "$r1", "$r2", "$r3",
    "$r4", "$r5", "$r6", "$r7",
    "$r8", "$r9", "$r10", "$r11",
    "$r12", "$r13", "$sp", "$acr",
};
/* Debug names for the 16 special (p) registers.  */
static const char *pregnames[] =
{
    "$bz", "$vr", "$pid", "$srs",
    "$wz", "$exs", "$eda", "$mof",
    "$dz", "$ebp", "$erp", "$srp",
    "$nrp", "$ccs", "$usp", "$spc",
};
/* We need this table to handle preg-moves with implicit width.  */
static int preg_sizes[] = {
    1, /* bz.  */
    1, /* vr.  */
    4, /* pid.  */
    1, /* srs.  */
    2, /* wz.  */
    4, 4, 4,        /* exs, eda, mof.  */
    4, 4, 4, 4,     /* dz, ebp, erp, srp.  */
    4, 4, 4, 4,     /* nrp, ccs, usp, spc.  */
};
/* Load/store a CPUState field (by member name) to/from a TCG value.  */
#define t_gen_mov_TN_env(tn, member) \
 _t_gen_mov_TN_env((tn), offsetof(CPUState, member))
#define t_gen_mov_env_TN(member, tn) \
 _t_gen_mov_env_TN(offsetof(CPUState, member), (tn))
/* tn = $r(r).  Out-of-range r only warns; the move is emitted anyway.  */
static inline void t_gen_mov_TN_reg(TCGv tn, int r)
{
    if (r < 0 || r > 15)
        fprintf(stderr, "wrong register read $r%d\n", r);
    tcg_gen_mov_tl(tn, cpu_R[r]);
}
/* $r(r) = tn.  Out-of-range r only warns; the move is emitted anyway.  */
static inline void t_gen_mov_reg_TN(int r, TCGv tn)
{
    if (r < 0 || r > 15)
        fprintf(stderr, "wrong register write $r%d\n", r);
    tcg_gen_mov_tl(cpu_R[r], tn);
}
/* tn = env-><field at offset>.  Sanity-checks the offset range only.  */
static inline void _t_gen_mov_TN_env(TCGv tn, int offset)
{
    if (offset > sizeof (CPUState))
        fprintf(stderr, "wrong load from env from off=%d\n", offset);
    tcg_gen_ld_tl(tn, cpu_env, offset);
}
/* env-><field at offset> = tn.  Sanity-checks the offset range only.  */
static inline void _t_gen_mov_env_TN(int offset, TCGv tn)
{
    if (offset > sizeof (CPUState))
        fprintf(stderr, "wrong store to env at off=%d\n", offset);
    tcg_gen_st_tl(tn, cpu_env, offset);
}
193 static inline void t_gen_mov_TN_preg(TCGv tn, int r)
195 if (r < 0 || r > 15)
196 fprintf(stderr, "wrong register read $p%d\n", r);
197 if (r == PR_BZ || r == PR_WZ || r == PR_DZ)
198 tcg_gen_mov_tl(tn, tcg_const_tl(0));
199 else if (r == PR_VR)
200 tcg_gen_mov_tl(tn, tcg_const_tl(32));
201 else if (r == PR_EDA) {
202 printf("read from EDA!\n");
203 tcg_gen_mov_tl(tn, cpu_PR[r]);
205 else
206 tcg_gen_mov_tl(tn, cpu_PR[r]);
/* $p(r) = tn, honouring per-register write semantics:
   - $bz/$wz/$dz are read-only-zero: writes are dropped.
   - $srs keeps only its low 2 bits (support-register bank select).
   - writing $pid flushes the TLB for the old pid.
   - writing $spc in user mode (S_FLAG) goes through a helper.
   - writing $ccs invalidates the translated cpu state.  */
static inline void t_gen_mov_preg_TN(DisasContext *dc, int r, TCGv tn)
{
    if (r < 0 || r > 15)
        fprintf(stderr, "wrong register write $p%d\n", r);
    if (r == PR_BZ || r == PR_WZ || r == PR_DZ)
        return;
    else if (r == PR_SRS)
        tcg_gen_andi_tl(cpu_PR[r], tn, 3);
    else {
        if (r == PR_PID)
            gen_helper_tlb_flush_pid(tn);
        if (dc->tb_flags & S_FLAG && r == PR_SPC)
            gen_helper_spc_write(tn);
        else if (r == PR_CCS)
            dc->cpustate_changed = 1;
        tcg_gen_mov_tl(cpu_PR[r], tn);
    }
}
/* Emit code that raises guest exception 'index' at runtime.  */
static inline void t_gen_raise_exception(uint32_t index)
{
    TCGv_i32 tmp = tcg_const_i32(index);
    gen_helper_raise_exception(tmp);
    tcg_temp_free_i32(tmp);
}
/* d = a << b, with CRIS semantics: shift counts >= 32 yield 0 (TCG's
   shl is undefined there).  t0 becomes all-ones when b > 31 via the
   sign of (31 - b), and the xor then clears d.  */
static void t_gen_lsl(TCGv d, TCGv a, TCGv b)
{
    TCGv t0, t_31;

    t0 = tcg_temp_new();
    t_31 = tcg_const_tl(31);
    tcg_gen_shl_tl(d, a, b);

    tcg_gen_sub_tl(t0, t_31, b);
    tcg_gen_sar_tl(t0, t0, t_31);
    tcg_gen_and_tl(t0, t0, d);
    tcg_gen_xor_tl(d, d, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t_31);
}
/* d = a >> b (logical), with shift counts >= 32 yielding 0.
   Same mask trick as t_gen_lsl.  */
static void t_gen_lsr(TCGv d, TCGv a, TCGv b)
{
    TCGv t0, t_31;

    t0 = tcg_temp_new();
    t_31 = tcg_temp_new();
    tcg_gen_shr_tl(d, a, b);

    tcg_gen_movi_tl(t_31, 31);
    tcg_gen_sub_tl(t0, t_31, b);
    tcg_gen_sar_tl(t0, t0, t_31);
    tcg_gen_and_tl(t0, t0, d);
    tcg_gen_xor_tl(d, d, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t_31);
}
/* d = a >> b (arithmetic), with shift counts >= 32 saturating to the
   sign: the mask is OR'ed in (not xor'ed) so d becomes all-ones when
   b > 31 and a was negative-shifted.  */
static void t_gen_asr(TCGv d, TCGv a, TCGv b)
{
    TCGv t0, t_31;

    t0 = tcg_temp_new();
    t_31 = tcg_temp_new();
    tcg_gen_sar_tl(d, a, b);

    tcg_gen_movi_tl(t_31, 31);
    tcg_gen_sub_tl(t0, t_31, b);
    tcg_gen_sar_tl(t0, t0, t_31);
    tcg_gen_or_tl(d, d, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t_31);
}
/* 64-bit signed mul, lower result in d and upper in d2.  */
static void t_gen_muls(TCGv d, TCGv d2, TCGv a, TCGv b)
{
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* Sign-extend both operands to 64 bits and multiply there.  */
    tcg_gen_ext_i32_i64(t0, a);
    tcg_gen_ext_i32_i64(t1, b);
    tcg_gen_mul_i64(t0, t0, t1);

    /* Split the 64-bit product into low (d) and high (d2) halves.  */
    tcg_gen_trunc_i64_i32(d, t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(d2, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* 64-bit unsigned muls, lower result in d and upper in d2.  */
static void t_gen_mulu(TCGv d, TCGv d2, TCGv a, TCGv b)
{
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* Zero-extend both operands to 64 bits and multiply there.  */
    tcg_gen_extu_i32_i64(t0, a);
    tcg_gen_extu_i32_i64(t1, b);
    tcg_gen_mul_i64(t0, t0, t1);

    /* Split the 64-bit product into low (d) and high (d2) halves.  */
    tcg_gen_trunc_i64_i32(d, t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(d2, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/* One step of the CRIS dstep (division step) insn.  */
static void t_gen_cris_dstep(TCGv d, TCGv a, TCGv b)
{
    int l1;

    l1 = gen_new_label();

    /*
     * d <<= 1
     * if (d >= s)
     *   d -= s;
     */
    tcg_gen_shli_tl(d, a, 1);
    tcg_gen_brcond_tl(TCG_COND_LTU, d, b, l1);
    tcg_gen_sub_tl(d, d, b);
    gen_set_label(l1);
}
/* Extended arithmetics on CRIS.  */
/* Add CCS bit 'flag' (shifted down to 1/0) into d.  */
static inline void t_gen_add_flag(TCGv d, int flag)
{
    TCGv c;

    c = tcg_temp_new();
    t_gen_mov_TN_preg(c, PR_CCS);
    /* Propagate carry into d.  */
    tcg_gen_andi_tl(c, c, 1 << flag);
    if (flag)
        tcg_gen_shri_tl(c, c, flag);
    tcg_gen_add_tl(d, d, c);
    tcg_temp_free(c);
}
/* Add the X-qualified carry into d: when X is known set at translation
   time, add C directly; when X is known clear, add nothing; otherwise
   emit branch-free code that masks C with the runtime X flag.  */
static inline void t_gen_addx_carry(DisasContext *dc, TCGv d)
{
    if (dc->flagx_known) {
        if (dc->flags_x) {
            TCGv c;

            c = tcg_temp_new();
            t_gen_mov_TN_preg(c, PR_CCS);
            /* C flag is already at bit 0.  */
            tcg_gen_andi_tl(c, c, C_FLAG);
            tcg_gen_add_tl(d, d, c);
            tcg_temp_free(c);
        }
    } else {
        TCGv x, c;

        x = tcg_temp_new();
        c = tcg_temp_new();
        t_gen_mov_TN_preg(x, PR_CCS);
        tcg_gen_mov_tl(c, x);

        /* Propagate carry into d if X is set. Branch free.  */
        tcg_gen_andi_tl(c, c, C_FLAG);
        tcg_gen_andi_tl(x, x, X_FLAG);
        tcg_gen_shri_tl(x, x, 4);

        tcg_gen_and_tl(x, x, c);
        tcg_gen_add_tl(d, d, x);
        tcg_temp_free(x);
        tcg_temp_free(c);
    }
}
/* Subtract the X-qualified carry from d; mirror of t_gen_addx_carry.  */
static inline void t_gen_subx_carry(DisasContext *dc, TCGv d)
{
    if (dc->flagx_known) {
        if (dc->flags_x) {
            TCGv c;

            c = tcg_temp_new();
            t_gen_mov_TN_preg(c, PR_CCS);
            /* C flag is already at bit 0.  */
            tcg_gen_andi_tl(c, c, C_FLAG);
            tcg_gen_sub_tl(d, d, c);
            tcg_temp_free(c);
        }
    } else {
        TCGv x, c;

        x = tcg_temp_new();
        c = tcg_temp_new();
        t_gen_mov_TN_preg(x, PR_CCS);
        tcg_gen_mov_tl(c, x);

        /* Propagate carry into d if X is set. Branch free.  */
        tcg_gen_andi_tl(c, c, C_FLAG);
        tcg_gen_andi_tl(x, x, X_FLAG);
        tcg_gen_shri_tl(x, x, 4);

        tcg_gen_and_tl(x, x, c);
        tcg_gen_sub_tl(d, d, x);
        tcg_temp_free(x);
        tcg_temp_free(c);
    }
}
/* Swap the two bytes within each half word of the s operand.
   T0 = ((T0 << 8) & 0xff00ff00) | ((T0 >> 8) & 0x00ff00ff)  */
static inline void t_gen_swapb(TCGv d, TCGv s)
{
    TCGv t, org_s;

    t = tcg_temp_new();
    org_s = tcg_temp_new();

    /* d and s may refer to the same object.  */
    tcg_gen_mov_tl(org_s, s);
    tcg_gen_shli_tl(t, org_s, 8);
    tcg_gen_andi_tl(d, t, 0xff00ff00);
    tcg_gen_shri_tl(t, org_s, 8);
    tcg_gen_andi_tl(t, t, 0x00ff00ff);
    tcg_gen_or_tl(d, d, t);
    tcg_temp_free(t);
    tcg_temp_free(org_s);
}
/* Swap the halfwords of the s operand.  */
static inline void t_gen_swapw(TCGv d, TCGv s)
{
    TCGv t;
    /* d and s refer the same object.  */
    t = tcg_temp_new();
    tcg_gen_mov_tl(t, s);
    tcg_gen_shli_tl(d, t, 16);
    tcg_gen_shri_tl(t, t, 16);
    tcg_gen_or_tl(d, d, t);
    tcg_temp_free(t);
}
/* Reverse the bits within each byte.
   T0 = (((T0 << 7) & 0x80808080) |
         ((T0 << 5) & 0x40404040) |
         ((T0 << 3) & 0x20202020) |
         ((T0 << 1) & 0x10101010) |
         ((T0 >> 1) & 0x08080808) |
         ((T0 >> 3) & 0x04040404) |
         ((T0 >> 5) & 0x02020202) |
         ((T0 >> 7) & 0x01010101));
 */
static inline void t_gen_swapr(TCGv d, TCGv s)
{
    /* Shift/mask pairs implementing the bit reversal above.  */
    struct {
        int shift; /* LSL when positive, LSR when negative.  */
        uint32_t mask;
    } bitrev [] = {
        {7, 0x80808080},
        {5, 0x40404040},
        {3, 0x20202020},
        {1, 0x10101010},
        {-1, 0x08080808},
        {-3, 0x04040404},
        {-5, 0x02020202},
        {-7, 0x01010101}
    };
    int i;
    TCGv t, org_s;

    /* d and s refer the same object.  */
    t = tcg_temp_new();
    org_s = tcg_temp_new();
    tcg_gen_mov_tl(org_s, s);

    tcg_gen_shli_tl(t, org_s, bitrev[0].shift);
    tcg_gen_andi_tl(d, t, bitrev[0].mask);
    for (i = 1; i < ARRAY_SIZE(bitrev); i++) {
        if (bitrev[i].shift >= 0) {
            tcg_gen_shli_tl(t, org_s, bitrev[i].shift);
        } else {
            tcg_gen_shri_tl(t, org_s, -bitrev[i].shift);
        }
        tcg_gen_andi_tl(t, t, bitrev[i].mask);
        tcg_gen_or_tl(d, d, t);
    }
    tcg_temp_free(t);
    tcg_temp_free(org_s);
}
/* Emit a conditional jump: pc = env_btaken ? pc_true : pc_false.  */
static void t_gen_cc_jmp(TCGv pc_true, TCGv pc_false)
{
    TCGv btaken;
    int l1;

    l1 = gen_new_label();
    btaken = tcg_temp_new();

    /* Conditional jmp.  */
    tcg_gen_mov_tl(btaken, env_btaken);
    tcg_gen_mov_tl(env_pc, pc_false);
    tcg_gen_brcondi_tl(TCG_COND_EQ, btaken, 0, l1);
    tcg_gen_mov_tl(env_pc, pc_true);
    gen_set_label(l1);

    tcg_temp_free(btaken);
}
/* End the TB jumping to dest; chain to the next TB (slot n) only when
   dest is on the same guest page as this TB's start.  */
static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = dc->tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(env_pc, dest);
        tcg_gen_exit_tb((long)tb + n);
    } else {
        tcg_gen_movi_tl(env_pc, dest);
        tcg_gen_exit_tb(0);
    }
}
/* Sign extend at translation time: treat bit 'width' of 'val' as the
   sign bit and replicate it through bit 31.  */
static int sign_extend(unsigned int val, unsigned int width)
{
    unsigned int shift = 31 - width;

    /* Logical shift left places the field's sign bit at bit 31;
       the arithmetic shift right then duplicates it downwards.  */
    int sval = (int)(val << shift);
    sval >>= shift;
    return sval;
}
/* Mark the X flag as known-clear for the next insn; if it was known set,
   $ccs must be re-evaluated later.  */
static inline void cris_clear_x_flag(DisasContext *dc)
{
    if (dc->flagx_known && dc->flags_x)
        dc->flags_uptodate = 0;

    dc->flagx_known = 1;
    dc->flags_x = 0;
}
/* Write the translation-time cc state (op, size, mask) out to the TCG
   globals; cc_size is cached and only written when it changed.  */
static void cris_flush_cc_state(DisasContext *dc)
{
    if (dc->cc_size_uptodate != dc->cc_size) {
        tcg_gen_movi_tl(cc_size, dc->cc_size);
        dc->cc_size_uptodate = dc->cc_size;
    }
    tcg_gen_movi_tl(cc_op, dc->cc_op);
    tcg_gen_movi_tl(cc_mask, dc->cc_mask);
}
/* Materialize $ccs from the deferred cc state, dispatching to the
   per-op/per-size helper; then overlay the X flag when it is known at
   translation time.  No-op if $ccs is already up to date.  */
static void cris_evaluate_flags(DisasContext *dc)
{
    if (dc->flags_uptodate)
        return;

    cris_flush_cc_state(dc);

    switch (dc->cc_op)
    {
    case CC_OP_MCP:
        gen_helper_evaluate_flags_mcp(cpu_PR[PR_CCS],
                    cpu_PR[PR_CCS], cc_src,
                    cc_dest, cc_result);
        break;
    case CC_OP_MULS:
        gen_helper_evaluate_flags_muls(cpu_PR[PR_CCS],
                    cpu_PR[PR_CCS], cc_result,
                    cpu_PR[PR_MOF]);
        break;
    case CC_OP_MULU:
        gen_helper_evaluate_flags_mulu(cpu_PR[PR_CCS],
                    cpu_PR[PR_CCS], cc_result,
                    cpu_PR[PR_MOF]);
        break;
    case CC_OP_MOVE:
    case CC_OP_AND:
    case CC_OP_OR:
    case CC_OP_XOR:
    case CC_OP_ASR:
    case CC_OP_LSR:
    case CC_OP_LSL:
        /* Pure-logic ops: flags depend only on the result.  */
        switch (dc->cc_size)
        {
        case 4:
            gen_helper_evaluate_flags_move_4(cpu_PR[PR_CCS],
                        cpu_PR[PR_CCS], cc_result);
            break;
        case 2:
            gen_helper_evaluate_flags_move_2(cpu_PR[PR_CCS],
                        cpu_PR[PR_CCS], cc_result);
            break;
        default:
            gen_helper_evaluate_flags();
            break;
        }
        break;
    case CC_OP_FLAGS:
        /* live.  */
        break;
    case CC_OP_SUB:
    case CC_OP_CMP:
        if (dc->cc_size == 4)
            gen_helper_evaluate_flags_sub_4(cpu_PR[PR_CCS],
                cpu_PR[PR_CCS], cc_src, cc_dest, cc_result);
        else
            gen_helper_evaluate_flags();

        break;
    default:
        /* Generic arithmetic: needs src, dest and result.  */
        switch (dc->cc_size)
        {
        case 4:
            gen_helper_evaluate_flags_alu_4(cpu_PR[PR_CCS],
                cpu_PR[PR_CCS], cc_src, cc_dest, cc_result);
            break;
        default:
            gen_helper_evaluate_flags();
            break;
        }
        break;
    }

    if (dc->flagx_known) {
        if (dc->flags_x)
            tcg_gen_ori_tl(cpu_PR[PR_CCS],
                       cpu_PR[PR_CCS], X_FLAG);
        else
            tcg_gen_andi_tl(cpu_PR[PR_CCS],
                    cpu_PR[PR_CCS], ~X_FLAG);
    }

    dc->flags_uptodate = 1;
}
/* Set which flags the current insn updates.  A zero mask disables cc
   updates entirely.  If the new mask drops flags the previous insn was
   still carrying, evaluate them into $ccs first.  */
static void cris_cc_mask(DisasContext *dc, unsigned int mask)
{
    uint32_t ovl;

    if (!mask) {
        dc->update_cc = 0;
        return;
    }

    /* Check if we need to evaluate the condition codes due to
       CC overlaying.  */
    ovl = (dc->cc_mask ^ mask) & ~mask;
    if (ovl) {
        /* TODO: optimize this case. It trigs all the time.  */
        cris_evaluate_flags (dc);
    }
    dc->cc_mask = mask;
    dc->update_cc = 1;
}
/* Record the pending cc-producing op and size; $ccs becomes stale.  */
static void cris_update_cc_op(DisasContext *dc, int op, int size)
{
    dc->cc_op = op;
    dc->cc_size = size;
    dc->flags_uptodate = 0;
}
/* Save the x flag state at the time of the cc snapshot.  */
static inline void cris_update_cc_x(DisasContext *dc)
{
    if (dc->flagx_known) {
        /* Skip the store when cc_x already holds the known value.  */
        if (dc->cc_x_uptodate == (2 | dc->flags_x))
            return;
        tcg_gen_movi_tl(cc_x, dc->flags_x);
        dc->cc_x_uptodate = 2 | dc->flags_x;
    }
    else {
        /* X not known at translation time: snapshot it from $ccs.  */
        tcg_gen_andi_tl(cc_x, cpu_PR[PR_CCS], X_FLAG);
        dc->cc_x_uptodate = 1;
    }
}
/* Update cc prior to executing ALU op. Needs source operands untouched.  */
static void cris_pre_alu_update_cc(DisasContext *dc, int op,
                   TCGv dst, TCGv src, int size)
{
    if (dc->update_cc) {
        cris_update_cc_op(dc, op, size);
        tcg_gen_mov_tl(cc_src, src);

        /* Pure-logic ops derive flags from the result only, so cc_dest
           does not need to be snapshotted for them.  */
        if (op != CC_OP_MOVE
            && op != CC_OP_AND
            && op != CC_OP_OR
            && op != CC_OP_XOR
            && op != CC_OP_ASR
            && op != CC_OP_LSR
            && op != CC_OP_LSL)
            tcg_gen_mov_tl(cc_dest, dst);

        cris_update_cc_x(dc);
    }
}
/* Update cc after executing ALU op. needs the result.  */
static inline void cris_update_result(DisasContext *dc, TCGv res)
{
    if (dc->update_cc)
        tcg_gen_mov_tl(cc_result, res);
}
/* Emit the TCG ops computing dst = a <op> b for the given cc_op, then
   truncate the result to 'size' bytes.  (The old header comment about a
   return value was stale; this function returns nothing.)  */
static void cris_alu_op_exec(DisasContext *dc, int op,
                   TCGv dst, TCGv a, TCGv b, int size)
{
    /* Emit the ALU insns.  */
    switch (op)
    {
    case CC_OP_ADD:
        tcg_gen_add_tl(dst, a, b);
        /* Extended arithmetics.  */
        t_gen_addx_carry(dc, dst);
        break;
    case CC_OP_ADDC:
        tcg_gen_add_tl(dst, a, b);
        t_gen_add_flag(dst, 0); /* C_FLAG.  */
        break;
    case CC_OP_MCP:
        tcg_gen_add_tl(dst, a, b);
        t_gen_add_flag(dst, 8); /* R_FLAG.  */
        break;
    case CC_OP_SUB:
        tcg_gen_sub_tl(dst, a, b);
        /* Extended arithmetics.  */
        t_gen_subx_carry(dc, dst);
        break;
    case CC_OP_MOVE:
        tcg_gen_mov_tl(dst, b);
        break;
    case CC_OP_OR:
        tcg_gen_or_tl(dst, a, b);
        break;
    case CC_OP_AND:
        tcg_gen_and_tl(dst, a, b);
        break;
    case CC_OP_XOR:
        tcg_gen_xor_tl(dst, a, b);
        break;
    case CC_OP_LSL:
        t_gen_lsl(dst, a, b);
        break;
    case CC_OP_LSR:
        t_gen_lsr(dst, a, b);
        break;
    case CC_OP_ASR:
        t_gen_asr(dst, a, b);
        break;
    case CC_OP_NEG:
        tcg_gen_neg_tl(dst, b);
        /* Extended arithmetics.  */
        t_gen_subx_carry(dc, dst);
        break;
    case CC_OP_LZ:
        gen_helper_lz(dst, b);
        break;
    case CC_OP_MULS:
        t_gen_muls(dst, cpu_PR[PR_MOF], a, b);
        break;
    case CC_OP_MULU:
        t_gen_mulu(dst, cpu_PR[PR_MOF], a, b);
        break;
    case CC_OP_DSTEP:
        t_gen_cris_dstep(dst, a, b);
        break;
    case CC_OP_BOUND:
    {
        /* dst = min(a, b), unsigned.  */
        int l1;
        l1 = gen_new_label();
        tcg_gen_mov_tl(dst, a);
        tcg_gen_brcond_tl(TCG_COND_LEU, a, b, l1);
        tcg_gen_mov_tl(dst, b);
        gen_set_label(l1);
    }
        break;
    case CC_OP_CMP:
        tcg_gen_sub_tl(dst, a, b);
        /* Extended arithmetics.  */
        t_gen_subx_carry(dc, dst);
        break;
    default:
        qemu_log("illegal ALU op.\n");
        BUG();
        break;
    }

    /* Truncate the result to the operation's size.  */
    if (size == 1)
        tcg_gen_andi_tl(dst, dst, 0xff);
    else if (size == 2)
        tcg_gen_andi_tl(dst, dst, 0xffff);
}
/* Full ALU step: snapshot cc inputs, execute the op into a temp, record
   the result for flag evaluation, then (for sub-word sizes) merge the
   low bytes back into d.  CMP never writes back; 4-byte ops write d
   directly.  */
static void cris_alu(DisasContext *dc, int op,
               TCGv d, TCGv op_a, TCGv op_b, int size)
{
    TCGv tmp;
    int writeback;

    writeback = 1;

    if (op == CC_OP_CMP) {
        tmp = tcg_temp_new();
        writeback = 0;
    } else if (size == 4) {
        tmp = d;
        writeback = 0;
    } else
        tmp = tcg_temp_new();


    cris_pre_alu_update_cc(dc, op, op_a, op_b, size);
    cris_alu_op_exec(dc, op, tmp, op_a, op_b, size);
    cris_update_result(dc, tmp);

    /* Writeback.  */
    if (writeback) {
        if (size == 1)
            tcg_gen_andi_tl(d, d, ~0xff);
        else
            tcg_gen_andi_tl(d, d, ~0xffff);
        tcg_gen_or_tl(d, d, tmp);
    }
    if (!TCGV_EQUAL(tmp, d))
        tcg_temp_free(tmp);
}
843 static int arith_cc(DisasContext *dc)
845 if (dc->update_cc) {
846 switch (dc->cc_op) {
847 case CC_OP_ADDC: return 1;
848 case CC_OP_ADD: return 1;
849 case CC_OP_SUB: return 1;
850 case CC_OP_DSTEP: return 1;
851 case CC_OP_LSL: return 1;
852 case CC_OP_LSR: return 1;
853 case CC_OP_ASR: return 1;
854 case CC_OP_CMP: return 1;
855 case CC_OP_NEG: return 1;
856 case CC_OP_OR: return 1;
857 case CC_OP_AND: return 1;
858 case CC_OP_XOR: return 1;
859 case CC_OP_MULU: return 1;
860 case CC_OP_MULS: return 1;
861 default:
862 return 0;
865 return 0;
/* Emit code leaving cc non-zero iff condition 'cond' holds, using the
   fast path on cc_result when possible, else evaluating $ccs.  */
static void gen_tst_cc (DisasContext *dc, TCGv cc, int cond)
{
    int arith_opt, move_opt;

    /* TODO: optimize more condition codes.  */

    /*
     * If the flags are live, we've gotta look into the bits of CCS.
     * Otherwise, if we just did an arithmetic operation we try to
     * evaluate the condition code faster.
     *
     * When this function is done, T0 should be non-zero if the condition
     * code is true.
     */
    arith_opt = arith_cc(dc) && !dc->flags_uptodate;
    move_opt = (dc->cc_op == CC_OP_MOVE);
    switch (cond) {
        case CC_EQ:
            if (arith_opt || move_opt) {
                /* If cc_result is zero, T0 should be
                   non-zero otherwise T0 should be zero.  */
                int l1;
                l1 = gen_new_label();
                tcg_gen_movi_tl(cc, 0);
                tcg_gen_brcondi_tl(TCG_COND_NE, cc_result,
                        0, l1);
                tcg_gen_movi_tl(cc, 1);
                gen_set_label(l1);
            }
            else {
                cris_evaluate_flags(dc);
                tcg_gen_andi_tl(cc,
                        cpu_PR[PR_CCS], Z_FLAG);
            }
            break;
        case CC_NE:
            if (arith_opt || move_opt)
                tcg_gen_mov_tl(cc, cc_result);
            else {
                cris_evaluate_flags(dc);
                tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
                        Z_FLAG);
                tcg_gen_andi_tl(cc, cc, Z_FLAG);
            }
            break;
        case CC_CS:
            cris_evaluate_flags(dc);
            tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], C_FLAG);
            break;
        case CC_CC:
            cris_evaluate_flags(dc);
            tcg_gen_xori_tl(cc, cpu_PR[PR_CCS], C_FLAG);
            tcg_gen_andi_tl(cc, cc, C_FLAG);
            break;
        case CC_VS:
            cris_evaluate_flags(dc);
            tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], V_FLAG);
            break;
        case CC_VC:
            cris_evaluate_flags(dc);
            tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
                    V_FLAG);
            tcg_gen_andi_tl(cc, cc, V_FLAG);
            break;
        case CC_PL:
            if (arith_opt || move_opt) {
                /* Non-negative result: inverted sign bit.  */
                int bits = 31;

                if (dc->cc_size == 1)
                    bits = 7;
                else if (dc->cc_size == 2)
                    bits = 15;

                tcg_gen_shri_tl(cc, cc_result, bits);
                tcg_gen_xori_tl(cc, cc, 1);
            } else {
                cris_evaluate_flags(dc);
                tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
                        N_FLAG);
                tcg_gen_andi_tl(cc, cc, N_FLAG);
            }
            break;
        case CC_MI:
            if (arith_opt || move_opt) {
                /* Negative result: sign bit of cc_result.  */
                int bits = 31;

                if (dc->cc_size == 1)
                    bits = 7;
                else if (dc->cc_size == 2)
                    bits = 15;

                tcg_gen_shri_tl(cc, cc_result, bits);
                tcg_gen_andi_tl(cc, cc, 1);
            }
            else {
                cris_evaluate_flags(dc);
                tcg_gen_andi_tl(cc, cpu_PR[PR_CCS],
                        N_FLAG);
            }
            break;
        case CC_LS:
            cris_evaluate_flags(dc);
            tcg_gen_andi_tl(cc, cpu_PR[PR_CCS],
                    C_FLAG | Z_FLAG);
            break;
        case CC_HI:
            cris_evaluate_flags(dc);
            {
                TCGv tmp;

                tmp = tcg_temp_new();
                tcg_gen_xori_tl(tmp, cpu_PR[PR_CCS],
                        C_FLAG | Z_FLAG);
                /* Overlay the C flag on top of the Z.  */
                tcg_gen_shli_tl(cc, tmp, 2);
                tcg_gen_and_tl(cc, tmp, cc);
                tcg_gen_andi_tl(cc, cc, Z_FLAG);

                tcg_temp_free(tmp);
            }
            break;
        case CC_GE:
            cris_evaluate_flags(dc);
            /* Overlay the V flag on top of the N.  */
            tcg_gen_shli_tl(cc, cpu_PR[PR_CCS], 2);
            tcg_gen_xor_tl(cc,
                    cpu_PR[PR_CCS], cc);
            tcg_gen_andi_tl(cc, cc, N_FLAG);
            tcg_gen_xori_tl(cc, cc, N_FLAG);
            break;
        case CC_LT:
            cris_evaluate_flags(dc);
            /* Overlay the V flag on top of the N.  */
            tcg_gen_shli_tl(cc, cpu_PR[PR_CCS], 2);
            tcg_gen_xor_tl(cc,
                    cpu_PR[PR_CCS], cc);
            tcg_gen_andi_tl(cc, cc, N_FLAG);
            break;
        case CC_GT:
            cris_evaluate_flags(dc);
            {
                TCGv n, z;

                n = tcg_temp_new();
                z = tcg_temp_new();

                /* To avoid a shift we overlay everything on
                   the V flag.  */
                tcg_gen_shri_tl(n, cpu_PR[PR_CCS], 2);
                tcg_gen_shri_tl(z, cpu_PR[PR_CCS], 1);
                /* invert Z.  */
                tcg_gen_xori_tl(z, z, 2);

                tcg_gen_xor_tl(n, n, cpu_PR[PR_CCS]);
                tcg_gen_xori_tl(n, n, 2);
                tcg_gen_and_tl(cc, z, n);
                tcg_gen_andi_tl(cc, cc, 2);

                tcg_temp_free(n);
                tcg_temp_free(z);
            }
            break;
        case CC_LE:
            cris_evaluate_flags(dc);
            {
                TCGv n, z;

                n = tcg_temp_new();
                z = tcg_temp_new();

                /* To avoid a shift we overlay everything on
                   the V flag.  */
                tcg_gen_shri_tl(n, cpu_PR[PR_CCS], 2);
                tcg_gen_shri_tl(z, cpu_PR[PR_CCS], 1);

                tcg_gen_xor_tl(n, n, cpu_PR[PR_CCS]);
                tcg_gen_or_tl(cc, z, n);
                tcg_gen_andi_tl(cc, cc, 2);

                tcg_temp_free(n);
                tcg_temp_free(z);
            }
            break;
        case CC_P:
            cris_evaluate_flags(dc);
            tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], P_FLAG);
            break;
        case CC_A:
            /* Always true.  */
            tcg_gen_movi_tl(cc, 1);
            break;
        default:
            BUG();
            break;
    }
}
/* Store the direct jmp state into the cpu-state.  */
static void cris_store_direct_jmp(DisasContext *dc)
{
    if (dc->jmp == JMP_DIRECT) {
        tcg_gen_movi_tl(env_btarget, dc->jmp_pc);
        tcg_gen_movi_tl(env_btaken, 1);
    }
}
/* Prepare a conditional branch to pc + offset; the branch resolves after
   the delay slot (delayed_branch counts down).  Unconditional branches
   (CC_A) stay JMP_DIRECT so the TB can be chained.  */
static void cris_prepare_cc_branch (DisasContext *dc,
                    int offset, int cond)
{
    /* This helps us re-schedule the micro-code to insns in delay-slots
       before the actual jump.  */
    dc->delayed_branch = 2;
    dc->jmp_pc = dc->pc + offset;

    if (cond != CC_A)
    {
        dc->jmp = JMP_INDIRECT;
        gen_tst_cc (dc, env_btaken, cond);
        tcg_gen_movi_tl(env_btarget, dc->jmp_pc);
    } else {
        /* Allow chaining.  */
        dc->jmp = JMP_DIRECT;
    }
}
/* jumps, when the dest is in a live reg for example. Direct should be set
   when the dest addr is constant to allow tb chaining.  */
static inline void cris_prepare_jmp (DisasContext *dc, unsigned int type)
{
    /* This helps us re-schedule the micro-code to insns in delay-slots
       before the actual jump.  */
    dc->delayed_branch = 2;
    dc->jmp = type;
    if (type == JMP_INDIRECT)
        tcg_gen_movi_tl(env_btaken, 1);
}
/* Emit a 64-bit guest load: dst = *(uint64_t *)addr.  */
static void gen_load64(DisasContext *dc, TCGv_i64 dst, TCGv addr)
{
    int mem_index = cpu_mmu_index(dc->env);

    /* If we get a fault on a delayslot we must keep the jmp state in
       the cpu-state to be able to re-execute the jmp.  */
    if (dc->delayed_branch == 1)
        cris_store_direct_jmp(dc);

    tcg_gen_qemu_ld64(dst, addr, mem_index);
}
/* Emit a guest load of 'size' bytes (1, 2 or 4) into dst, sign- or
   zero-extending sub-word loads according to 'sign'.  */
static void gen_load(DisasContext *dc, TCGv dst, TCGv addr,
             unsigned int size, int sign)
{
    int mem_index = cpu_mmu_index(dc->env);

    /* If we get a fault on a delayslot we must keep the jmp state in
       the cpu-state to be able to re-execute the jmp.  */
    if (dc->delayed_branch == 1)
        cris_store_direct_jmp(dc);

    if (size == 1) {
        if (sign)
            tcg_gen_qemu_ld8s(dst, addr, mem_index);
        else
            tcg_gen_qemu_ld8u(dst, addr, mem_index);
    }
    else if (size == 2) {
        if (sign)
            tcg_gen_qemu_ld16s(dst, addr, mem_index);
        else
            tcg_gen_qemu_ld16u(dst, addr, mem_index);
    }
    else if (size == 4) {
        tcg_gen_qemu_ld32u(dst, addr, mem_index);
    }
    else {
        abort();
    }
}
/* Emit a guest store of 'size' bytes.  Handles the CRIS conditional
   write: with X and P set the store is suppressed and C is set instead;
   with X set (P clear) a completed store clears C.  */
static void gen_store (DisasContext *dc, TCGv addr, TCGv val,
               unsigned int size)
{
    int mem_index = cpu_mmu_index(dc->env);

    /* If we get a fault on a delayslot we must keep the jmp state in
       the cpu-state to be able to re-execute the jmp.  */
    if (dc->delayed_branch == 1)
        cris_store_direct_jmp(dc);


    /* Conditional writes. We only support the kind were X and P are known
       at translation time.  */
    if (dc->flagx_known && dc->flags_x && (dc->tb_flags & P_FLAG)) {
        dc->postinc = 0;
        cris_evaluate_flags(dc);
        tcg_gen_ori_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], C_FLAG);
        return;
    }

    if (size == 1)
        tcg_gen_qemu_st8(val, addr, mem_index);
    else if (size == 2)
        tcg_gen_qemu_st16(val, addr, mem_index);
    else
        tcg_gen_qemu_st32(val, addr, mem_index);

    if (dc->flagx_known && dc->flags_x) {
        cris_evaluate_flags(dc);
        tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~C_FLAG);
    }
}
/* d = sign_extend(s, size bytes); plain move for 4-byte size.  */
static inline void t_gen_sext(TCGv d, TCGv s, int size)
{
    if (size == 1)
        tcg_gen_ext8s_i32(d, s);
    else if (size == 2)
        tcg_gen_ext16s_i32(d, s);
    else if(!TCGV_EQUAL(d, s))
        tcg_gen_mov_tl(d, s);
}
/* d = zero_extend(s, size bytes); plain move for 4-byte size.  */
static inline void t_gen_zext(TCGv d, TCGv s, int size)
{
    if (size == 1)
        tcg_gen_ext8u_i32(d, s);
    else if (size == 2)
        tcg_gen_ext16u_i32(d, s);
    else if (!TCGV_EQUAL(d, s))
        tcg_gen_mov_tl(d, s);
}
#if DISAS_CRIS
/* Disassembly helper: size suffix character (b/w/d, 'x' if unknown).  */
static char memsize_char(int size)
{
    switch (size)
    {
    case 1: return 'b'; break;
    case 2: return 'w'; break;
    case 4: return 'd'; break;
    default:
        return 'x';
        break;
    }
}
#endif
/* Memory access size from the single-bit z field: 0 -> 1 byte, 1 -> 2.  */
static inline unsigned int memsize_z(DisasContext *dc)
{
    return dc->zsize + 1;
}
1220 static inline unsigned int memsize_zz(DisasContext *dc)
1222 switch (dc->zzsize)
1224 case 0: return 1;
1225 case 1: return 2;
1226 default:
1227 return 4;
/* Advance the address register for post-increment addressing modes.  */
static inline void do_postinc (DisasContext *dc, int size)
{
    if (dc->postinc)
        tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], size);
}
/* dst = $r(rs), sign- or zero-extended from 'size' bytes.  */
static inline void dec_prep_move_r(DisasContext *dc, int rs, int rd,
                   int size, int s_ext, TCGv dst)
{
    if (s_ext)
        t_gen_sext(dst, cpu_R[rs], size);
    else
        t_gen_zext(dst, cpu_R[rs], size);
}
/* Prepare T0 and T1 for a register alu operation.
   s_ext decides if the operand1 should be sign-extended or zero-extended when
   needed.  */
static void dec_prep_alu_r(DisasContext *dc, int rs, int rd,
              int size, int s_ext, TCGv dst, TCGv src)
{
    dec_prep_move_r(dc, rs, rd, size, s_ext, src);

    if (s_ext)
        t_gen_sext(dst, cpu_R[rd], size);
    else
        t_gen_zext(dst, cpu_R[rd], size);
}
/* Fetch the memory operand into dst.  [$r15+] (pc post-increment) is an
   immediate embedded in the insn stream and is read at translation time.
   Returns the total insn length in bytes.  */
static int dec_prep_move_m(DisasContext *dc, int s_ext, int memsize,
               TCGv dst)
{
    unsigned int rs, rd;
    uint32_t imm;
    int is_imm;
    int insn_len = 2;

    rs = dc->op1;
    rd = dc->op2;
    is_imm = rs == 15 && dc->postinc;

    /* Load [$rs] onto T1.  */
    if (is_imm) {
        /* Byte immediates still occupy a full 16-bit slot.  */
        insn_len = 2 + memsize;
        if (memsize == 1)
            insn_len++;

        if (memsize != 4) {
            if (s_ext) {
                if (memsize == 1)
                    imm = ldsb_code(dc->pc + 2);
                else
                    imm = ldsw_code(dc->pc + 2);
            } else {
                if (memsize == 1)
                    imm = ldub_code(dc->pc + 2);
                else
                    imm = lduw_code(dc->pc + 2);
            }
        } else
            imm = ldl_code(dc->pc + 2);

        tcg_gen_movi_tl(dst, imm);
        dc->postinc = 0;
    } else {
        cris_flush_cc_state(dc);
        gen_load(dc, dst, cpu_R[rs], memsize, 0);
        if (s_ext)
            t_gen_sext(dst, dst, memsize);
        else
            t_gen_zext(dst, dst, memsize);
    }
    return insn_len;
}
/* Prepare T0 and T1 for a memory + alu operation.
   s_ext decides if the operand1 should be sign-extended or zero-extended when
   needed.  */
static int dec_prep_alu_m(DisasContext *dc, int s_ext, int memsize,
              TCGv dst, TCGv src)
{
    int insn_len;

    insn_len = dec_prep_move_m(dc, s_ext, memsize, src);
    tcg_gen_mov_tl(dst, cpu_R[dc->op2]);
    return insn_len;
}
#if DISAS_CRIS
/* Disassembly helper: mnemonic for a 4-bit condition code.  */
static const char *cc_name(int cc)
{
    static const char *cc_names[16] = {
        "cc", "cs", "ne", "eq", "vc", "vs", "pl", "mi",
        "ls", "hi", "ge", "lt", "gt", "le", "a", "p"
    };
    assert(cc < 16);
    return cc_names[cc];
}
#endif
1331 /* Start of insn decoders. */
1333 static unsigned int dec_bccq(DisasContext *dc)
1335 int32_t offset;
1336 int sign;
1337 uint32_t cond = dc->op2;
1338 int tmp;
1340 offset = EXTRACT_FIELD (dc->ir, 1, 7);
1341 sign = EXTRACT_FIELD(dc->ir, 0, 0);
1343 offset *= 2;
1344 offset |= sign << 8;
1345 tmp = offset;
1346 offset = sign_extend(offset, 8);
1348 LOG_DIS("b%s %x\n", cc_name(cond), dc->pc + offset);
1350 /* op2 holds the condition-code. */
1351 cris_cc_mask(dc, 0);
1352 cris_prepare_cc_branch (dc, offset, cond);
1353 return 2;
/* addoq: $acr = $r(op2) + sign-extended 8-bit immediate; no flags.  */
static unsigned int dec_addoq(DisasContext *dc)
{
    int32_t imm;

    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 7);
    imm = sign_extend(dc->op1, 7);

    LOG_DIS("addoq %d, $r%u\n", imm, dc->op2);
    cris_cc_mask(dc, 0);
    /* Fetch register operand,  */
    tcg_gen_addi_tl(cpu_R[R_ACR], cpu_R[dc->op2], imm);

    return 2;
}
1369 static unsigned int dec_addq(DisasContext *dc)
1371 LOG_DIS("addq %u, $r%u\n", dc->op1, dc->op2);
1373 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1375 cris_cc_mask(dc, CC_MASK_NZVC);
1377 cris_alu(dc, CC_OP_ADD,
1378 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
1379 return 2;
1381 static unsigned int dec_moveq(DisasContext *dc)
1383 uint32_t imm;
1385 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1386 imm = sign_extend(dc->op1, 5);
1387 LOG_DIS("moveq %d, $r%u\n", imm, dc->op2);
1389 tcg_gen_mov_tl(cpu_R[dc->op2], tcg_const_tl(imm));
1390 return 2;
/* subq: subtract a 6-bit unsigned quick immediate from a register;
   sets NZVC. */
static unsigned int dec_subq(DisasContext *dc)
{
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);

    LOG_DIS("subq %u, $r%u\n", dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_SUB,
             cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
    return 2;
}
/* cmpq: compare a register against a sign-extended 6-bit quick
   immediate; sets NZVC, register is not written. */
static unsigned int dec_cmpq(DisasContext *dc)
{
    uint32_t imm;
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
    imm = sign_extend(dc->op1, 5);

    LOG_DIS("cmpq %d, $r%d\n", imm, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZVC);

    cris_alu(dc, CC_OP_CMP,
             cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
    return 2;
}
/* andq: bitwise AND with a sign-extended 6-bit quick immediate;
   sets NZ only. */
static unsigned int dec_andq(DisasContext *dc)
{
    uint32_t imm;
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
    imm = sign_extend(dc->op1, 5);

    LOG_DIS("andq %d, $r%d\n", imm, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);

    cris_alu(dc, CC_OP_AND,
             cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
    return 2;
}
/* orq: bitwise OR with a sign-extended 6-bit quick immediate;
   sets NZ only. */
static unsigned int dec_orq(DisasContext *dc)
{
    uint32_t imm;
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
    imm = sign_extend(dc->op1, 5);
    LOG_DIS("orq %d, $r%d\n", imm, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);

    cris_alu(dc, CC_OP_OR,
             cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
    return 2;
}
/* btstq: bit-test with a 5-bit quick bit number.  The helper computes
   the resulting CCS directly, so the flags are marked as already
   evaluated (CC_OP_FLAGS) afterwards. */
static unsigned int dec_btstq(DisasContext *dc)
{
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
    LOG_DIS("btstq %u, $r%d\n", dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);
    /* Flags must be up to date before the helper merges in N/Z. */
    cris_evaluate_flags(dc);
    gen_helper_btst(cpu_PR[PR_CCS], cpu_R[dc->op2],
                    tcg_const_tl(dc->op1), cpu_PR[PR_CCS]);
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op2], 4);
    cris_update_cc_op(dc, CC_OP_FLAGS, 4);
    dc->flags_uptodate = 1;
    return 2;
}
/* asrq: arithmetic shift right by a 5-bit quick count; sets NZ. */
static unsigned int dec_asrq(DisasContext *dc)
{
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
    LOG_DIS("asrq %u, $r%d\n", dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);

    /* Shift first, then run the result through CC_OP_MOVE so the
       flag machinery sees the shifted value. */
    tcg_gen_sari_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2],
             cpu_R[dc->op2], cpu_R[dc->op2], 4);
    return 2;
}
/* lslq: logical shift left by a 5-bit quick count; sets NZ. */
static unsigned int dec_lslq(DisasContext *dc)
{
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
    LOG_DIS("lslq %u, $r%d\n", dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);

    tcg_gen_shli_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);

    /* Feed the shifted value through CC_OP_MOVE to update the flags. */
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2],
             cpu_R[dc->op2], cpu_R[dc->op2], 4);
    return 2;
}
/* lsrq: logical shift right by a 5-bit quick count; sets NZ. */
static unsigned int dec_lsrq(DisasContext *dc)
{
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
    LOG_DIS("lsrq %u, $r%d\n", dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);

    tcg_gen_shri_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
    /* Feed the shifted value through CC_OP_MOVE to update the flags. */
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2],
             cpu_R[dc->op2], cpu_R[dc->op2], 4);
    return 2;
}
/* move (register to register), sized by the zz field; sets NZ.
   Dword moves write the destination directly and only record the
   pending cc state; sub-word moves go through cris_alu so the value
   is merged into the low bytes of the destination. */
static unsigned int dec_move_r(DisasContext *dc)
{
    int size = memsize_zz(dc);

    LOG_DIS("move.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);
    if (size == 4) {
        dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, cpu_R[dc->op2]);
        cris_cc_mask(dc, CC_MASK_NZ);
        cris_update_cc_op(dc, CC_OP_MOVE, 4);
        cris_update_cc_x(dc);
        cris_update_result(dc, cpu_R[dc->op2]);
    }
    else {
        TCGv t0;

        t0 = tcg_temp_new();
        dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
        cris_alu(dc, CC_OP_MOVE,
                 cpu_R[dc->op2],
                 cpu_R[dc->op2], t0, size);
        tcg_temp_free(t0);
    }
    return 2;
}
/* sCC: set register to 1 if the condition holds, else 0.
   CC_A ("always") is special-cased to an unconditional store of 1. */
static unsigned int dec_scc_r(DisasContext *dc)
{
    int cond = dc->op2;

    LOG_DIS("s%s $r%u\n",
            cc_name(cond), dc->op1);

    if (cond != CC_A)
    {
        int l1;

        /* gen_tst_cc leaves a zero/non-zero truth value in the
           register; normalize any non-zero result to exactly 1. */
        gen_tst_cc (dc, cpu_R[dc->op1], cond);
        l1 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_R[dc->op1], 0, l1);
        tcg_gen_movi_tl(cpu_R[dc->op1], 1);
        gen_set_label(l1);
    }
    else
        tcg_gen_movi_tl(cpu_R[dc->op1], 1);

    cris_cc_mask(dc, 0);
    return 2;
}
/* For dword-sized ops the registers themselves can be used as ALU
   operands; for sub-word ops allocate scratch temps (paired with
   cris_alu_free_temps). */
static inline void cris_alu_alloc_temps(DisasContext *dc, int size, TCGv *t)
{
    if (size == 4) {
        t[0] = cpu_R[dc->op2];
        t[1] = cpu_R[dc->op1];
    } else {
        t[0] = tcg_temp_new();
        t[1] = tcg_temp_new();
    }
}
/* Release temps from cris_alu_alloc_temps.  Dword ops aliased the
   registers directly, so there is nothing to free in that case. */
static inline void cris_alu_free_temps(DisasContext *dc, int size, TCGv *t)
{
    if (size != 4) {
        tcg_temp_free(t[0]);
        tcg_temp_free(t[1]);
    }
}
/* and (register): bitwise AND, sized by zz; sets NZ. */
static unsigned int dec_and_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);

    LOG_DIS("and.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);

    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
    cris_alu(dc, CC_OP_AND, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* lz: count leading zeros of op1 into op2 (always dword); sets NZ. */
static unsigned int dec_lz_r(DisasContext *dc)
{
    TCGv t0;
    LOG_DIS("lz $r%u, $r%u\n",
            dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);
    t0 = tcg_temp_new();
    dec_prep_alu_r(dc, dc->op1, dc->op2, 4, 0, cpu_R[dc->op2], t0);
    cris_alu(dc, CC_OP_LZ, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
1597 static unsigned int dec_lsl_r(DisasContext *dc)
1599 TCGv t[2];
1600 int size = memsize_zz(dc);
1602 LOG_DIS("lsl.%c $r%u, $r%u\n",
1603 memsize_char(size), dc->op1, dc->op2);
1605 cris_cc_mask(dc, CC_MASK_NZ);
1606 cris_alu_alloc_temps(dc, size, t);
1607 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1608 tcg_gen_andi_tl(t[1], t[1], 63);
1609 cris_alu(dc, CC_OP_LSL, cpu_R[dc->op2], t[0], t[1], size);
1610 cris_alu_alloc_temps(dc, size, t);
1611 return 2;
/* lsr (register): logical shift right; shift count masked to 6 bits;
   sets NZ. */
static unsigned int dec_lsr_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);

    LOG_DIS("lsr.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
    tcg_gen_andi_tl(t[1], t[1], 63);
    cris_alu(dc, CC_OP_LSR, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* asr (register): arithmetic shift right; the source operand is
   sign-extended (s_ext = 1); shift count masked to 6 bits; sets NZ. */
static unsigned int dec_asr_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);

    LOG_DIS("asr.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 1, t[0], t[1]);
    tcg_gen_andi_tl(t[1], t[1], 63);
    cris_alu(dc, CC_OP_ASR, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* muls: signed multiply (operands sign-extended, s_ext = 1);
   result is always a full dword; sets NZV. */
static unsigned int dec_muls_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);

    LOG_DIS("muls.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZV);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 1, t[0], t[1]);

    cris_alu(dc, CC_OP_MULS, cpu_R[dc->op2], t[0], t[1], 4);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
1664 static unsigned int dec_mulu_r(DisasContext *dc)
1666 TCGv t[2];
1667 int size = memsize_zz(dc);
1669 LOG_DIS("mulu.%c $r%u, $r%u\n",
1670 memsize_char(size), dc->op1, dc->op2);
1671 cris_cc_mask(dc, CC_MASK_NZV);
1672 cris_alu_alloc_temps(dc, size, t);
1673 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1675 cris_alu(dc, CC_OP_MULU, cpu_R[dc->op2], t[0], t[1], 4);
1676 cris_alu_alloc_temps(dc, size, t);
1677 return 2;
/* dstep: division step; sets NZ. */
static unsigned int dec_dstep_r(DisasContext *dc)
{
    LOG_DIS("dstep $r%u, $r%u\n", dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu(dc, CC_OP_DSTEP,
             cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op1], 4);
    return 2;
}
/* xor (register): always dword-sized; sets NZ. */
static unsigned int dec_xor_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);
    LOG_DIS("xor.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    BUG_ON(size != 4); /* xor is dword. */
    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);

    cris_alu(dc, CC_OP_XOR, cpu_R[dc->op2], t[0], t[1], 4);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* bound (register): clamp op2 against op1; sets NZ.
   Uses a local temp because CC_OP_BOUND's codegen contains branches. */
static unsigned int dec_bound_r(DisasContext *dc)
{
    TCGv l0;
    int size = memsize_zz(dc);
    LOG_DIS("bound.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);
    l0 = tcg_temp_local_new();
    dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, l0);
    cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], cpu_R[dc->op2], l0, 4);
    tcg_temp_free(l0);
    return 2;
}
/* cmp (register): compare, sized by zz; sets NZVC, no register write. */
static unsigned int dec_cmp_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);
    LOG_DIS("cmp.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);

    cris_alu(dc, CC_OP_CMP, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* abs: absolute value via the sign-mask trick
   (x ^ (x >> 31)) - (x >> 31); sets NZ through CC_OP_MOVE. */
static unsigned int dec_abs_r(DisasContext *dc)
{
    TCGv t0;

    LOG_DIS("abs $r%u, $r%u\n",
            dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);

    t0 = tcg_temp_new();
    tcg_gen_sari_tl(t0, cpu_R[dc->op1], 31);
    tcg_gen_xor_tl(cpu_R[dc->op2], cpu_R[dc->op1], t0);
    tcg_gen_sub_tl(cpu_R[dc->op2], cpu_R[dc->op2], t0);
    tcg_temp_free(t0);

    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op2], 4);
    return 2;
}
/* add (register): sized by zz; sets NZVC. */
static unsigned int dec_add_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);
    LOG_DIS("add.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);

    cris_alu(dc, CC_OP_ADD, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* addc: add with carry.  Needs the C flag, so the lazy flags are
   evaluated first and the X flag is forced on for this insn. */
static unsigned int dec_addc_r(DisasContext *dc)
{
    LOG_DIS("addc $r%u, $r%u\n",
            dc->op1, dc->op2);
    cris_evaluate_flags(dc);
    /* Set for this insn. */
    dc->flagx_known = 1;
    dc->flags_x = X_FLAG;

    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_ADDC,
             cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op1], 4);
    return 2;
}
/* mcp: multiply-compare-and-accumulate with a special register (op2);
   requires evaluated flags; sets RNZV. */
static unsigned int dec_mcp_r(DisasContext *dc)
{
    LOG_DIS("mcp $p%u, $r%u\n",
            dc->op2, dc->op1);
    cris_evaluate_flags(dc);
    cris_cc_mask(dc, CC_MASK_RNZV);
    cris_alu(dc, CC_OP_MCP,
             cpu_R[dc->op1], cpu_R[dc->op1], cpu_PR[dc->op2], 4);
    return 2;
}
#if DISAS_CRIS
/* Build the swap-mode suffix string ("n", "w", "b", "r" for each set
   mode bit) into the caller-supplied buffer and return it.
   NOTE(review): writes up to 5 bytes (all four letters plus the NUL)
   when every mode bit is set — the caller's buffer must be at least
   char[5]. */
static char * swapmode_name(int mode, char *modename) {
    int i = 0;
    if (mode & 8)
        modename[i++] = 'n';
    if (mode & 4)
        modename[i++] = 'w';
    if (mode & 2)
        modename[i++] = 'b';
    if (mode & 1)
        modename[i++] = 'r';
    modename[i++] = 0;
    return modename;
}
#endif
1811 static unsigned int dec_swap_r(DisasContext *dc)
1813 TCGv t0;
1814 #if DISAS_CRIS
1815 char modename[4];
1816 #endif
1817 LOG_DIS("swap%s $r%u\n",
1818 swapmode_name(dc->op2, modename), dc->op1);
1820 cris_cc_mask(dc, CC_MASK_NZ);
1821 t0 = tcg_temp_new();
1822 t_gen_mov_TN_reg(t0, dc->op1);
1823 if (dc->op2 & 8)
1824 tcg_gen_not_tl(t0, t0);
1825 if (dc->op2 & 4)
1826 t_gen_swapw(t0, t0);
1827 if (dc->op2 & 2)
1828 t_gen_swapb(t0, t0);
1829 if (dc->op2 & 1)
1830 t_gen_swapr(t0, t0);
1831 cris_alu(dc, CC_OP_MOVE,
1832 cpu_R[dc->op1], cpu_R[dc->op1], t0, 4);
1833 tcg_temp_free(t0);
1834 return 2;
/* or (register): sized by zz; sets NZ. */
static unsigned int dec_or_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);
    LOG_DIS("or.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
    cris_alu(dc, CC_OP_OR, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* addi: op1 += op2 << zzsize (scaled index add); no flag update. */
static unsigned int dec_addi_r(DisasContext *dc)
{
    TCGv t0;
    LOG_DIS("addi.%c $r%u, $r%u\n",
            memsize_char(memsize_zz(dc)), dc->op2, dc->op1);
    cris_cc_mask(dc, 0);
    t0 = tcg_temp_new();
    tcg_gen_shl_tl(t0, cpu_R[dc->op2], tcg_const_tl(dc->zzsize));
    tcg_gen_add_tl(cpu_R[dc->op1], cpu_R[dc->op1], t0);
    tcg_temp_free(t0);
    return 2;
}
/* addi-acr: ACR = op1 + (op2 << zzsize); op1 itself is unchanged;
   no flag update. */
static unsigned int dec_addi_acr(DisasContext *dc)
{
    TCGv t0;
    LOG_DIS("addi.%c $r%u, $r%u, $acr\n",
            memsize_char(memsize_zz(dc)), dc->op2, dc->op1);
    cris_cc_mask(dc, 0);
    t0 = tcg_temp_new();
    tcg_gen_shl_tl(t0, cpu_R[dc->op2], tcg_const_tl(dc->zzsize));
    tcg_gen_add_tl(cpu_R[R_ACR], cpu_R[dc->op1], t0);
    tcg_temp_free(t0);
    return 2;
}
/* neg (register): sized by zz; sets NZVC. */
static unsigned int dec_neg_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);
    LOG_DIS("neg.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);

    cris_alu(dc, CC_OP_NEG, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* btst (register): bit-test with bit number in op1.  The helper
   computes CCS directly, so flags are marked evaluated afterwards. */
static unsigned int dec_btst_r(DisasContext *dc)
{
    LOG_DIS("btst $r%u, $r%u\n",
            dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZ);
    /* Flags must be up to date before the helper merges in N/Z. */
    cris_evaluate_flags(dc);
    gen_helper_btst(cpu_PR[PR_CCS], cpu_R[dc->op2],
                    cpu_R[dc->op1], cpu_PR[PR_CCS]);
    cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2],
             cpu_R[dc->op2], cpu_R[dc->op2], 4);
    cris_update_cc_op(dc, CC_OP_FLAGS, 4);
    dc->flags_uptodate = 1;
    return 2;
}
/* sub (register): sized by zz; sets NZVC. */
static unsigned int dec_sub_r(DisasContext *dc)
{
    TCGv t[2];
    int size = memsize_zz(dc);
    LOG_DIS("sub.%c $r%u, $r%u\n",
            memsize_char(size), dc->op1, dc->op2);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu_alloc_temps(dc, size, t);
    dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
    cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], t[0], t[1], size);
    cris_alu_free_temps(dc, size, t);
    return 2;
}
/* Zero extension. From size to dword. */
static unsigned int dec_movu_r(DisasContext *dc)
{
    TCGv t0;
    int size = memsize_z(dc);
    LOG_DIS("movu.%c $r%u, $r%u\n",
            memsize_char(size),
            dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);
    t0 = tcg_temp_new();
    /* dec_prep_move_r zero-extends the sub-word source into t0. */
    dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
    cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
/* Sign extension. From size to dword. */
static unsigned int dec_movs_r(DisasContext *dc)
{
    TCGv t0;
    int size = memsize_z(dc);
    LOG_DIS("movs.%c $r%u, $r%u\n",
            memsize_char(size),
            dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZ);
    t0 = tcg_temp_new();
    /* Size can only be qi or hi. */
    t_gen_sext(t0, cpu_R[dc->op1], size);
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2], cpu_R[dc->op1], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
/* zero extension. From size to dword. */
static unsigned int dec_addu_r(DisasContext *dc)
{
    TCGv t0;
    int size = memsize_z(dc);
    LOG_DIS("addu.%c $r%u, $r%u\n",
            memsize_char(size),
            dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZVC);
    t0 = tcg_temp_new();
    /* Size can only be qi or hi. */
    t_gen_zext(t0, cpu_R[dc->op1], size);
    cris_alu(dc, CC_OP_ADD,
             cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
/* Sign extension. From size to dword. */
static unsigned int dec_adds_r(DisasContext *dc)
{
    TCGv t0;
    int size = memsize_z(dc);
    LOG_DIS("adds.%c $r%u, $r%u\n",
            memsize_char(size),
            dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZVC);
    t0 = tcg_temp_new();
    /* Size can only be qi or hi. */
    t_gen_sext(t0, cpu_R[dc->op1], size);
    cris_alu(dc, CC_OP_ADD,
             cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
/* Zero extension. From size to dword. */
static unsigned int dec_subu_r(DisasContext *dc)
{
    TCGv t0;
    int size = memsize_z(dc);
    LOG_DIS("subu.%c $r%u, $r%u\n",
            memsize_char(size),
            dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZVC);
    t0 = tcg_temp_new();
    /* Size can only be qi or hi. */
    t_gen_zext(t0, cpu_R[dc->op1], size);
    cris_alu(dc, CC_OP_SUB,
             cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
/* Sign extension. From size to dword. */
static unsigned int dec_subs_r(DisasContext *dc)
{
    TCGv t0;
    int size = memsize_z(dc);
    LOG_DIS("subs.%c $r%u, $r%u\n",
            memsize_char(size),
            dc->op1, dc->op2);

    cris_cc_mask(dc, CC_MASK_NZVC);
    t0 = tcg_temp_new();
    /* Size can only be qi or hi. */
    t_gen_sext(t0, cpu_R[dc->op1], size);
    cris_alu(dc, CC_OP_SUB,
             cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
    tcg_temp_free(t0);
    return 2;
}
/* setf/clrf: set or clear CCS flag bits named in the instruction.
   Also handles the side effects of toggling X, P, S and U: X is
   tracked at translation time, a P change ends the TB, and entering
   user mode swaps in USP. */
static unsigned int dec_setclrf(DisasContext *dc)
{
    uint32_t flags;
    /* Opcode bit 2 distinguishes setf (0) from clrf (1). */
    int set = (~dc->opcode >> 2) & 1;

    flags = (EXTRACT_FIELD(dc->ir, 12, 15) << 4)
            | EXTRACT_FIELD(dc->ir, 0, 3);
    if (set && flags == 0) {
        LOG_DIS("nop\n");
        return 2;
    } else if (!set && (flags & 0x20)) {
        LOG_DIS("di\n");
    }
    else {
        LOG_DIS("%sf %x\n",
                set ? "set" : "clr",
                flags);
    }

    /* User space is not allowed to touch these. Silently ignore. */
    if (dc->tb_flags & U_FLAG) {
        flags &= ~(S_FLAG | I_FLAG | U_FLAG);
    }

    /* X is modelled at translation time; remember its new state. */
    if (flags & X_FLAG) {
        dc->flagx_known = 1;
        if (set)
            dc->flags_x = X_FLAG;
        else
            dc->flags_x = 0;
    }

    /* Break the TB if the P flag changes. */
    if (flags & P_FLAG) {
        if ((set && !(dc->tb_flags & P_FLAG))
            || (!set && (dc->tb_flags & P_FLAG))) {
            tcg_gen_movi_tl(env_pc, dc->pc + 2);
            dc->is_jmp = DISAS_UPDATE;
            dc->cpustate_changed = 1;
        }
    }
    if (flags & S_FLAG) {
        dc->cpustate_changed = 1;
    }

    /* Simply decode the flags. */
    cris_evaluate_flags (dc);
    cris_update_cc_op(dc, CC_OP_FLAGS, 4);
    cris_update_cc_x(dc);
    tcg_gen_movi_tl(cc_op, dc->cc_op);

    if (set) {
        if (!(dc->tb_flags & U_FLAG) && (flags & U_FLAG)) {
            /* Enter user mode. */
            t_gen_mov_env_TN(ksp, cpu_R[R_SP]);
            tcg_gen_mov_tl(cpu_R[R_SP], cpu_PR[PR_USP]);
            dc->cpustate_changed = 1;
        }
        tcg_gen_ori_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], flags);
    }
    else
        tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~flags);

    dc->flags_uptodate = 1;
    dc->clear_x = 0;
    return 2;
}
/* move general register to support register (via helper). */
static unsigned int dec_move_rs(DisasContext *dc)
{
    LOG_DIS("move $r%u, $s%u\n", dc->op1, dc->op2);
    cris_cc_mask(dc, 0);
    gen_helper_movl_sreg_reg(tcg_const_tl(dc->op2), tcg_const_tl(dc->op1));
    return 2;
}
/* move support register to general register (via helper). */
static unsigned int dec_move_sr(DisasContext *dc)
{
    LOG_DIS("move $s%u, $r%u\n", dc->op2, dc->op1);
    cris_cc_mask(dc, 0);
    gen_helper_movl_reg_sreg(tcg_const_tl(dc->op1), tcg_const_tl(dc->op2));
    return 2;
}
/* move general register to special register.  Writes to CCS need the
   lazy flags evaluated first, and in user mode only the unprivileged
   CCS bits (mask 0x39f) may be modified. */
static unsigned int dec_move_rp(DisasContext *dc)
{
    TCGv t[2];
    LOG_DIS("move $r%u, $p%u\n", dc->op1, dc->op2);
    cris_cc_mask(dc, 0);

    t[0] = tcg_temp_new();
    if (dc->op2 == PR_CCS) {
        cris_evaluate_flags(dc);
        t_gen_mov_TN_reg(t[0], dc->op1);
        if (dc->tb_flags & U_FLAG) {
            t[1] = tcg_temp_new();
            /* User space is not allowed to touch all flags. */
            tcg_gen_andi_tl(t[0], t[0], 0x39f);
            tcg_gen_andi_tl(t[1], cpu_PR[PR_CCS], ~0x39f);
            tcg_gen_or_tl(t[0], t[1], t[0]);
            tcg_temp_free(t[1]);
        }
    }
    else
        t_gen_mov_TN_reg(t[0], dc->op1);

    t_gen_mov_preg_TN(dc, dc->op2, t[0]);
    if (dc->op2 == PR_CCS) {
        /* CCS was just overwritten; the flags are now authoritative. */
        cris_update_cc_op(dc, CC_OP_FLAGS, 4);
        dc->flags_uptodate = 1;
    }
    tcg_temp_free(t[0]);
    return 2;
}
/* move special register to general register; width comes from the
   special register's own size (preg_sizes). */
static unsigned int dec_move_pr(DisasContext *dc)
{
    TCGv t0;
    LOG_DIS("move $p%u, $r%u\n", dc->op1, dc->op2);
    cris_cc_mask(dc, 0);

    /* Reading CCS requires the lazy flags to be materialized. */
    if (dc->op2 == PR_CCS)
        cris_evaluate_flags(dc);

    t0 = tcg_temp_new();
    t_gen_mov_TN_preg(t0, dc->op2);
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op1], cpu_R[dc->op1], t0, preg_sizes[dc->op2]);
    tcg_temp_free(t0);
    return 2;
}
/* move memory to register; sets NZ.  Dword loads can target the
   register directly; sub-word loads go through a temp so cris_alu can
   merge into the destination's low bytes. */
static unsigned int dec_move_mr(DisasContext *dc)
{
    int memsize = memsize_zz(dc);
    int insn_len;
    LOG_DIS("move.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    if (memsize == 4) {
        insn_len = dec_prep_move_m(dc, 0, 4, cpu_R[dc->op2]);
        cris_cc_mask(dc, CC_MASK_NZ);
        cris_update_cc_op(dc, CC_OP_MOVE, 4);
        cris_update_cc_x(dc);
        cris_update_result(dc, cpu_R[dc->op2]);
    }
    else {
        TCGv t0;

        t0 = tcg_temp_new();
        insn_len = dec_prep_move_m(dc, 0, memsize, t0);
        cris_cc_mask(dc, CC_MASK_NZ);
        cris_alu(dc, CC_OP_MOVE,
                 cpu_R[dc->op2], cpu_R[dc->op2], t0, memsize);
        tcg_temp_free(t0);
    }
    do_postinc(dc, memsize);
    return insn_len;
}
/* Allocate the two scratch temps used by memory + alu decoders
   (paired with cris_alu_m_free_temps). */
static inline void cris_alu_m_alloc_temps(TCGv *t)
{
    t[0] = tcg_temp_new();
    t[1] = tcg_temp_new();
}
/* Release the temps from cris_alu_m_alloc_temps. */
static inline void cris_alu_m_free_temps(TCGv *t)
{
    tcg_temp_free(t[0]);
    tcg_temp_free(t[1]);
}
/* movs: load a sub-word memory operand sign-extended to dword;
   sets NZ. */
static unsigned int dec_movs_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("movs.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    /* sign extend. */
    insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu(dc, CC_OP_MOVE,
             cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* addu: add a sub-word memory operand zero-extended to dword;
   sets NZVC. */
static unsigned int dec_addu_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("addu.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    /* zero extend (s_ext argument is 0; the original comment wrongly
       said "sign extend"). */
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_ADD,
             cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* adds: add a sub-word memory operand sign-extended to dword;
   sets NZVC. */
static unsigned int dec_adds_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("adds.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    /* sign extend. */
    insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_ADD, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* subu: subtract a sub-word memory operand zero-extended to dword;
   sets NZVC. */
static unsigned int dec_subu_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("subu.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    /* zero extend (s_ext argument is 0; the original comment wrongly
       said "sign extend"). */
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* subs: subtract a sub-word memory operand sign-extended to dword;
   sets NZVC. */
static unsigned int dec_subs_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("subs.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    /* sign extend. */
    insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* movu: load a sub-word memory operand zero-extended to dword;
   sets NZ. */
static unsigned int dec_movu_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;

    LOG_DIS("movu.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZ);
    cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* cmpu: compare against a sub-word memory operand zero-extended to
   dword; sets NZVC, no register write. */
static unsigned int dec_cmpu_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("cmpu.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_CMP, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* cmps: compare against a sub-word memory operand sign-extended to
   dword; sets NZVC, no register write. */
static unsigned int dec_cmps_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_z(dc);
    int insn_len;
    LOG_DIS("cmps.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_CMP,
             cpu_R[dc->op2], cpu_R[dc->op2], t[1],
             memsize_zz(dc));
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* cmp: compare against a memory operand, sized by zz; sets NZVC,
   no register write. */
static unsigned int dec_cmp_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_zz(dc);
    int insn_len;
    LOG_DIS("cmp.%c [$r%u%s, $r%u\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_CMP,
             cpu_R[dc->op2], cpu_R[dc->op2], t[1],
             memsize_zz(dc));
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
2390 static unsigned int dec_test_m(DisasContext *dc)
2392 TCGv t[2];
2393 int memsize = memsize_zz(dc);
2394 int insn_len;
2395 LOG_DIS("test.%d [$r%u%s] op2=%x\n",
2396 memsize_char(memsize),
2397 dc->op1, dc->postinc ? "+]" : "]",
2398 dc->op2);
2400 cris_evaluate_flags(dc);
2402 cris_alu_m_alloc_temps(t);
2403 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2404 cris_cc_mask(dc, CC_MASK_NZ);
2405 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~3);
2407 cris_alu(dc, CC_OP_CMP,
2408 cpu_R[dc->op2], t[1], tcg_const_tl(0), memsize_zz(dc));
2409 do_postinc(dc, memsize);
2410 cris_alu_m_free_temps(t);
2411 return insn_len;
2414 static unsigned int dec_and_m(DisasContext *dc)
2416 TCGv t[2];
2417 int memsize = memsize_zz(dc);
2418 int insn_len;
2419 LOG_DIS("and.%d [$r%u%s, $r%u\n",
2420 memsize_char(memsize),
2421 dc->op1, dc->postinc ? "+]" : "]",
2422 dc->op2);
2424 cris_alu_m_alloc_temps(t);
2425 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2426 cris_cc_mask(dc, CC_MASK_NZ);
2427 cris_alu(dc, CC_OP_AND, cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2428 do_postinc(dc, memsize);
2429 cris_alu_m_free_temps(t);
2430 return insn_len;
2433 static unsigned int dec_add_m(DisasContext *dc)
2435 TCGv t[2];
2436 int memsize = memsize_zz(dc);
2437 int insn_len;
2438 LOG_DIS("add.%d [$r%u%s, $r%u\n",
2439 memsize_char(memsize),
2440 dc->op1, dc->postinc ? "+]" : "]",
2441 dc->op2);
2443 cris_alu_m_alloc_temps(t);
2444 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2445 cris_cc_mask(dc, CC_MASK_NZVC);
2446 cris_alu(dc, CC_OP_ADD,
2447 cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2448 do_postinc(dc, memsize);
2449 cris_alu_m_free_temps(t);
2450 return insn_len;
2453 static unsigned int dec_addo_m(DisasContext *dc)
2455 TCGv t[2];
2456 int memsize = memsize_zz(dc);
2457 int insn_len;
2458 LOG_DIS("add.%d [$r%u%s, $r%u\n",
2459 memsize_char(memsize),
2460 dc->op1, dc->postinc ? "+]" : "]",
2461 dc->op2);
2463 cris_alu_m_alloc_temps(t);
2464 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2465 cris_cc_mask(dc, 0);
2466 cris_alu(dc, CC_OP_ADD, cpu_R[R_ACR], t[0], t[1], 4);
2467 do_postinc(dc, memsize);
2468 cris_alu_m_free_temps(t);
2469 return insn_len;
2472 static unsigned int dec_bound_m(DisasContext *dc)
2474 TCGv l[2];
2475 int memsize = memsize_zz(dc);
2476 int insn_len;
2477 LOG_DIS("bound.%d [$r%u%s, $r%u\n",
2478 memsize_char(memsize),
2479 dc->op1, dc->postinc ? "+]" : "]",
2480 dc->op2);
2482 l[0] = tcg_temp_local_new();
2483 l[1] = tcg_temp_local_new();
2484 insn_len = dec_prep_alu_m(dc, 0, memsize, l[0], l[1]);
2485 cris_cc_mask(dc, CC_MASK_NZ);
2486 cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], l[0], l[1], 4);
2487 do_postinc(dc, memsize);
2488 tcg_temp_free(l[0]);
2489 tcg_temp_free(l[1]);
2490 return insn_len;
/* addc (memory): add with carry.  Always dword.  Needs the C flag, so
   the lazy flags are evaluated first and X is forced on for this insn. */
static unsigned int dec_addc_mr(DisasContext *dc)
{
    TCGv t[2];
    int insn_len = 2;
    LOG_DIS("addc [$r%u%s, $r%u\n",
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2);

    cris_evaluate_flags(dc);

    /* Set for this insn. */
    dc->flagx_known = 1;
    dc->flags_x = X_FLAG;

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 0, 4, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_ADDC, cpu_R[dc->op2], t[0], t[1], 4);
    do_postinc(dc, 4);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* sub: subtract a memory operand, sized by zz; sets NZVC. */
static unsigned int dec_sub_m(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_zz(dc);
    int insn_len;
    LOG_DIS("sub.%c [$r%u%s, $r%u ir=%x zz=%x\n",
            memsize_char(memsize),
            dc->op1, dc->postinc ? "+]" : "]",
            dc->op2, dc->ir, dc->zzsize);

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, CC_MASK_NZVC);
    cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], t[0], t[1], memsize);
    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
2535 static unsigned int dec_or_m(DisasContext *dc)
2537 TCGv t[2];
2538 int memsize = memsize_zz(dc);
2539 int insn_len;
2540 LOG_DIS("or.%d [$r%u%s, $r%u pc=%x\n",
2541 memsize_char(memsize),
2542 dc->op1, dc->postinc ? "+]" : "]",
2543 dc->op2, dc->pc);
2545 cris_alu_m_alloc_temps(t);
2546 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2547 cris_cc_mask(dc, CC_MASK_NZ);
2548 cris_alu(dc, CC_OP_OR,
2549 cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2550 do_postinc(dc, memsize);
2551 cris_alu_m_free_temps(t);
2552 return insn_len;
/* move memory to special register.  Writes to CCS need the lazy flags
   evaluated first, and in user mode only the unprivileged CCS bits
   (mask 0x39f) may be modified. */
static unsigned int dec_move_mp(DisasContext *dc)
{
    TCGv t[2];
    int memsize = memsize_zz(dc);
    int insn_len = 2;

    LOG_DIS("move.%c [$r%u%s, $p%u\n",
            memsize_char(memsize),
            dc->op1,
            dc->postinc ? "+]" : "]",
            dc->op2);

    cris_alu_m_alloc_temps(t);
    insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
    cris_cc_mask(dc, 0);
    if (dc->op2 == PR_CCS) {
        cris_evaluate_flags(dc);
        if (dc->tb_flags & U_FLAG) {
            /* User space is not allowed to touch all flags. */
            tcg_gen_andi_tl(t[1], t[1], 0x39f);
            tcg_gen_andi_tl(t[0], cpu_PR[PR_CCS], ~0x39f);
            tcg_gen_or_tl(t[1], t[0], t[1]);
        }
    }

    t_gen_mov_preg_TN(dc, dc->op2, t[1]);

    do_postinc(dc, memsize);
    cris_alu_m_free_temps(t);
    return insn_len;
}
/* move special register to memory; width comes from the special
   register's own size (preg_sizes).  No flag update. */
static unsigned int dec_move_pm(DisasContext *dc)
{
    TCGv t0;
    int memsize;

    memsize = preg_sizes[dc->op2];

    LOG_DIS("move.%c $p%u, [$r%u%s\n",
            memsize_char(memsize),
            dc->op2, dc->op1, dc->postinc ? "+]" : "]");

    /* prepare store. Address in T0, value in T1. */
    /* Reading CCS requires the lazy flags to be materialized. */
    if (dc->op2 == PR_CCS)
        cris_evaluate_flags(dc);
    t0 = tcg_temp_new();
    t_gen_mov_TN_preg(t0, dc->op2);
    cris_flush_cc_state(dc);
    gen_store(dc, cpu_R[dc->op1], t0, memsize);
    tcg_temp_free(t0);

    cris_cc_mask(dc, 0);
    if (dc->postinc)
        tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], memsize);
    return 2;
}
/* movem [Rs](+), Rd: load registers r0..Rd from memory.  Registers are
   loaded pairwise as 64-bit quantities, with one trailing 32-bit load
   when the count is odd.  */
static unsigned int dec_movem_mr(DisasContext *dc)
{
    TCGv_i64 tmp[16];
    TCGv tmp32;
    TCGv addr;
    int i;
    int nr = dc->op2 + 1;   /* number of registers to load */

    LOG_DIS("movem [$r%u%s, $r%u\n", dc->op1,
            dc->postinc ? "+]" : "]", dc->op2);

    addr = tcg_temp_new();
    /* There are probably better ways of doing this.  */
    cris_flush_cc_state(dc);
    /* Issue all loads before touching any destination register, so a
       fault mid-sequence leaves the register file unmodified.  */
    for (i = 0; i < (nr >> 1); i++) {
        tmp[i] = tcg_temp_new_i64();
        tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
        gen_load64(dc, tmp[i], addr);
    }
    if (nr & 1) {
        /* Odd count: one extra 32-bit load for the last register.  */
        tmp32 = tcg_temp_new_i32();
        tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
        gen_load(dc, tmp32, addr, 4, 0);
    } else
        TCGV_UNUSED(tmp32);
    tcg_temp_free(addr);

    /* Now scatter each 64-bit temp into its register pair.  */
    for (i = 0; i < (nr >> 1); i++) {
        tcg_gen_trunc_i64_i32(cpu_R[i * 2], tmp[i]);
        tcg_gen_shri_i64(tmp[i], tmp[i], 32);
        tcg_gen_trunc_i64_i32(cpu_R[i * 2 + 1], tmp[i]);
        tcg_temp_free_i64(tmp[i]);
    }
    if (nr & 1) {
        tcg_gen_mov_tl(cpu_R[dc->op2], tmp32);
        tcg_temp_free(tmp32);
    }

    /* writeback the updated pointer value.  */
    if (dc->postinc)
        tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], nr * 4);

    /* gen_load might want to evaluate the previous insns flags.  */
    cris_cc_mask(dc, 0);
    return 2;
}
2660 static unsigned int dec_movem_rm(DisasContext *dc)
2662 TCGv tmp;
2663 TCGv addr;
2664 int i;
2666 LOG_DIS("movem $r%u, [$r%u%s\n", dc->op2, dc->op1,
2667 dc->postinc ? "+]" : "]");
2669 cris_flush_cc_state(dc);
2671 tmp = tcg_temp_new();
2672 addr = tcg_temp_new();
2673 tcg_gen_movi_tl(tmp, 4);
2674 tcg_gen_mov_tl(addr, cpu_R[dc->op1]);
2675 for (i = 0; i <= dc->op2; i++) {
2676 /* Displace addr. */
2677 /* Perform the store. */
2678 gen_store(dc, addr, cpu_R[i], 4);
2679 tcg_gen_add_tl(addr, addr, tmp);
2681 if (dc->postinc)
2682 tcg_gen_mov_tl(cpu_R[dc->op1], addr);
2683 cris_cc_mask(dc, 0);
2684 tcg_temp_free(tmp);
2685 tcg_temp_free(addr);
2686 return 2;
/* move.m Rs, [Rd](+): store a register to memory.  */
static unsigned int dec_move_rm(DisasContext *dc)
{
    int memsize;

    memsize = memsize_zz(dc);

    LOG_DIS("move.%d $r%u, [$r%u]\n",
            memsize, dc->op2, dc->op1);

    /* prepare store.  */
    cris_flush_cc_state(dc);
    gen_store(dc, cpu_R[dc->op1], cpu_R[dc->op2], memsize);

    if (dc->postinc)
        tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], memsize);
    cris_cc_mask(dc, 0);
    return 2;
}
2708 static unsigned int dec_lapcq(DisasContext *dc)
2710 LOG_DIS("lapcq %x, $r%u\n",
2711 dc->pc + dc->op1*2, dc->op2);
2712 cris_cc_mask(dc, 0);
2713 tcg_gen_movi_tl(cpu_R[dc->op2], dc->pc + dc->op1 * 2);
2714 return 2;
2717 static unsigned int dec_lapc_im(DisasContext *dc)
2719 unsigned int rd;
2720 int32_t imm;
2721 int32_t pc;
2723 rd = dc->op2;
2725 cris_cc_mask(dc, 0);
2726 imm = ldl_code(dc->pc + 2);
2727 LOG_DIS("lapc 0x%x, $r%u\n", imm + dc->pc, dc->op2);
2729 pc = dc->pc;
2730 pc += imm;
2731 t_gen_mov_reg_TN(rd, tcg_const_tl(pc));
2732 return 6;
/* Jump to special reg.  */
static unsigned int dec_jump_p(DisasContext *dc)
{
    LOG_DIS("jump $p%u\n", dc->op2);

    if (dc->op2 == PR_CCS)
        cris_evaluate_flags(dc);
    t_gen_mov_TN_preg(env_btarget, dc->op2);
    /* rete will often have low bit set to indicate delayslot.  */
    tcg_gen_andi_tl(env_btarget, env_btarget, ~1);
    cris_cc_mask(dc, 0);
    cris_prepare_jmp(dc, JMP_INDIRECT);
    return 2;
}
2750 /* Jump and save. */
2751 static unsigned int dec_jas_r(DisasContext *dc)
2753 LOG_DIS("jas $r%u, $p%u\n", dc->op1, dc->op2);
2754 cris_cc_mask(dc, 0);
2755 /* Store the return address in Pd. */
2756 tcg_gen_mov_tl(env_btarget, cpu_R[dc->op1]);
2757 if (dc->op2 > 15)
2758 abort();
2759 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 4));
2761 cris_prepare_jmp(dc, JMP_INDIRECT);
2762 return 2;
2765 static unsigned int dec_jas_im(DisasContext *dc)
2767 uint32_t imm;
2769 imm = ldl_code(dc->pc + 2);
2771 LOG_DIS("jas 0x%x\n", imm);
2772 cris_cc_mask(dc, 0);
2773 /* Store the return address in Pd. */
2774 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8));
2776 dc->jmp_pc = imm;
2777 cris_prepare_jmp(dc, JMP_DIRECT);
2778 return 6;
2781 static unsigned int dec_jasc_im(DisasContext *dc)
2783 uint32_t imm;
2785 imm = ldl_code(dc->pc + 2);
2787 LOG_DIS("jasc 0x%x\n", imm);
2788 cris_cc_mask(dc, 0);
2789 /* Store the return address in Pd. */
2790 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8 + 4));
2792 dc->jmp_pc = imm;
2793 cris_prepare_jmp(dc, JMP_DIRECT);
2794 return 6;
2797 static unsigned int dec_jasc_r(DisasContext *dc)
2799 LOG_DIS("jasc_r $r%u, $p%u\n", dc->op1, dc->op2);
2800 cris_cc_mask(dc, 0);
2801 /* Store the return address in Pd. */
2802 tcg_gen_mov_tl(env_btarget, cpu_R[dc->op1]);
2803 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 4 + 4));
2804 cris_prepare_jmp(dc, JMP_INDIRECT);
2805 return 2;
2808 static unsigned int dec_bcc_im(DisasContext *dc)
2810 int32_t offset;
2811 uint32_t cond = dc->op2;
2813 offset = ldsw_code(dc->pc + 2);
2815 LOG_DIS("b%s %d pc=%x dst=%x\n",
2816 cc_name(cond), offset,
2817 dc->pc, dc->pc + offset);
2819 cris_cc_mask(dc, 0);
2820 /* op2 holds the condition-code. */
2821 cris_prepare_cc_branch (dc, offset, cond);
2822 return 4;
2825 static unsigned int dec_bas_im(DisasContext *dc)
2827 int32_t simm;
2830 simm = ldl_code(dc->pc + 2);
2832 LOG_DIS("bas 0x%x, $p%u\n", dc->pc + simm, dc->op2);
2833 cris_cc_mask(dc, 0);
2834 /* Store the return address in Pd. */
2835 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8));
2837 dc->jmp_pc = dc->pc + simm;
2838 cris_prepare_jmp(dc, JMP_DIRECT);
2839 return 6;
2842 static unsigned int dec_basc_im(DisasContext *dc)
2844 int32_t simm;
2845 simm = ldl_code(dc->pc + 2);
2847 LOG_DIS("basc 0x%x, $p%u\n", dc->pc + simm, dc->op2);
2848 cris_cc_mask(dc, 0);
2849 /* Store the return address in Pd. */
2850 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 12));
2852 dc->jmp_pc = dc->pc + simm;
2853 cris_prepare_jmp(dc, JMP_DIRECT);
2854 return 6;
/* Decode the rfe/rfn/break/halt group of privileged control insns.  */
static unsigned int dec_rfe_etc(DisasContext *dc)
{
    cris_cc_mask(dc, 0);

    if (dc->op2 == 15) {
        /* halt: mark the CPU halted and raise EXCP_HLT with the PC
           pointing past this insn.  */
        t_gen_mov_env_TN(halted, tcg_const_tl(1));
        tcg_gen_movi_tl(env_pc, dc->pc + 2);
        t_gen_raise_exception(EXCP_HLT);
        return 2;
    }

    switch (dc->op2 & 7) {
    case 2:
        /* rfe.  Flags must be concrete before the helper rewrites CCS.  */
        LOG_DIS("rfe\n");
        cris_evaluate_flags(dc);
        gen_helper_rfe();
        dc->is_jmp = DISAS_UPDATE;
        break;
    case 5:
        /* rfn.  */
        LOG_DIS("rfn\n");
        cris_evaluate_flags(dc);
        gen_helper_rfn();
        dc->is_jmp = DISAS_UPDATE;
        break;
    case 6:
        LOG_DIS("break %d\n", dc->op1);
        cris_evaluate_flags (dc);
        /* break.  */
        tcg_gen_movi_tl(env_pc, dc->pc + 2);

        /* Breaks start at 16 in the exception vector.  */
        t_gen_mov_env_TN(trap_vector,
                 tcg_const_tl(dc->op1 + 16));
        t_gen_raise_exception(EXCP_BREAK);
        dc->is_jmp = DISAS_UPDATE;
        break;
    default:
        /* Unhandled sub-opcode: report and abort translation.  */
        printf ("op2=%x\n", dc->op2);
        BUG();
        break;
    }
    return 2;
}
/* ftag/fidx on the data side — presumably cache-maintenance ops that
   are intentionally no-ops here since QEMU models no caches; only the
   insn length is returned.  TODO confirm against the CRIS v32 manual.  */
static unsigned int dec_ftag_fidx_d_m(DisasContext *dc)
{
    return 2;
}
/* ftag/fidx on the insn side — same as the data variant above:
   no-op under QEMU's cache-less model.  */
static unsigned int dec_ftag_fidx_i_m(DisasContext *dc)
{
    return 2;
}
/* Catch-all decoder entry: reached only when no pattern in decinfo[]
   matched.  Reports the raw opcode fields and aborts via BUG().  */
static unsigned int dec_null(DisasContext *dc)
{
    printf ("unknown insn pc=%x opc=%x op1=%x op2=%x\n",
        dc->pc, dc->opcode, dc->op1, dc->op2);
    fflush(NULL);
    BUG();
    return 2;
}
/*
 * Instruction decode table.  Each entry pairs a {bits, mask} opcode
 * pattern (supplied by the DEC_* macros) with its translator function.
 * cris_decoder() scans the table linearly and dispatches on the first
 * match, so more specific patterns must come before more general ones.
 * The final {{0, 0}, dec_null} entry matches everything and reports
 * an unknown insn.
 */
static struct decoder_info {
    struct {
        uint32_t bits;
        uint32_t mask;
    };
    unsigned int (*dec)(DisasContext *dc);
} decinfo[] = {
    /* Order matters here.  */
    {DEC_MOVEQ, dec_moveq},
    {DEC_BTSTQ, dec_btstq},
    {DEC_CMPQ, dec_cmpq},
    {DEC_ADDOQ, dec_addoq},
    {DEC_ADDQ, dec_addq},
    {DEC_SUBQ, dec_subq},
    {DEC_ANDQ, dec_andq},
    {DEC_ORQ, dec_orq},
    {DEC_ASRQ, dec_asrq},
    {DEC_LSLQ, dec_lslq},
    {DEC_LSRQ, dec_lsrq},
    {DEC_BCCQ, dec_bccq},

    {DEC_BCC_IM, dec_bcc_im},
    {DEC_JAS_IM, dec_jas_im},
    {DEC_JAS_R, dec_jas_r},
    {DEC_JASC_IM, dec_jasc_im},
    {DEC_JASC_R, dec_jasc_r},
    {DEC_BAS_IM, dec_bas_im},
    {DEC_BASC_IM, dec_basc_im},
    {DEC_JUMP_P, dec_jump_p},
    {DEC_LAPC_IM, dec_lapc_im},
    {DEC_LAPCQ, dec_lapcq},

    {DEC_RFE_ETC, dec_rfe_etc},
    {DEC_ADDC_MR, dec_addc_mr},

    {DEC_MOVE_MP, dec_move_mp},
    {DEC_MOVE_PM, dec_move_pm},
    {DEC_MOVEM_MR, dec_movem_mr},
    {DEC_MOVEM_RM, dec_movem_rm},
    {DEC_MOVE_PR, dec_move_pr},
    {DEC_SCC_R, dec_scc_r},
    {DEC_SETF, dec_setclrf},
    {DEC_CLEARF, dec_setclrf},

    {DEC_MOVE_SR, dec_move_sr},
    {DEC_MOVE_RP, dec_move_rp},
    {DEC_SWAP_R, dec_swap_r},
    {DEC_ABS_R, dec_abs_r},
    {DEC_LZ_R, dec_lz_r},
    {DEC_MOVE_RS, dec_move_rs},
    {DEC_BTST_R, dec_btst_r},
    {DEC_ADDC_R, dec_addc_r},

    {DEC_DSTEP_R, dec_dstep_r},
    {DEC_XOR_R, dec_xor_r},
    {DEC_MCP_R, dec_mcp_r},
    {DEC_CMP_R, dec_cmp_r},

    {DEC_ADDI_R, dec_addi_r},
    {DEC_ADDI_ACR, dec_addi_acr},

    {DEC_ADD_R, dec_add_r},
    {DEC_SUB_R, dec_sub_r},

    {DEC_ADDU_R, dec_addu_r},
    {DEC_ADDS_R, dec_adds_r},
    {DEC_SUBU_R, dec_subu_r},
    {DEC_SUBS_R, dec_subs_r},
    {DEC_LSL_R, dec_lsl_r},

    {DEC_AND_R, dec_and_r},
    {DEC_OR_R, dec_or_r},
    {DEC_BOUND_R, dec_bound_r},
    {DEC_ASR_R, dec_asr_r},
    {DEC_LSR_R, dec_lsr_r},

    {DEC_MOVU_R, dec_movu_r},
    {DEC_MOVS_R, dec_movs_r},
    {DEC_NEG_R, dec_neg_r},
    {DEC_MOVE_R, dec_move_r},

    {DEC_FTAG_FIDX_I_M, dec_ftag_fidx_i_m},
    {DEC_FTAG_FIDX_D_M, dec_ftag_fidx_d_m},

    {DEC_MULS_R, dec_muls_r},
    {DEC_MULU_R, dec_mulu_r},

    {DEC_ADDU_M, dec_addu_m},
    {DEC_ADDS_M, dec_adds_m},
    {DEC_SUBU_M, dec_subu_m},
    {DEC_SUBS_M, dec_subs_m},

    {DEC_CMPU_M, dec_cmpu_m},
    {DEC_CMPS_M, dec_cmps_m},
    {DEC_MOVU_M, dec_movu_m},
    {DEC_MOVS_M, dec_movs_m},

    {DEC_CMP_M, dec_cmp_m},
    {DEC_ADDO_M, dec_addo_m},
    {DEC_BOUND_M, dec_bound_m},
    {DEC_ADD_M, dec_add_m},
    {DEC_SUB_M, dec_sub_m},
    {DEC_AND_M, dec_and_m},
    {DEC_OR_M, dec_or_m},
    {DEC_MOVE_RM, dec_move_rm},
    {DEC_TEST_M, dec_test_m},
    {DEC_MOVE_MR, dec_move_mr},

    {{0, 0}, dec_null}
};
/* Fetch, split and dispatch one insn; returns its length in bytes
   (2 for the base halfword, 4/6 when an immediate follows).  */
static inline unsigned int
cris_decoder(DisasContext *dc)
{
    unsigned int insn_len = 2;
    int i;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);

    /* Load a halfword onto the instruction register.  */
    dc->ir = lduw_code(dc->pc);

    /* Now decode it.  Split the halfword into the fields the
       per-insn decoders consume.  */
    dc->opcode = EXTRACT_FIELD(dc->ir, 4, 11);
    dc->op1 = EXTRACT_FIELD(dc->ir, 0, 3);
    dc->op2 = EXTRACT_FIELD(dc->ir, 12, 15);
    dc->zsize = EXTRACT_FIELD(dc->ir, 4, 4);
    dc->zzsize = EXTRACT_FIELD(dc->ir, 4, 5);
    dc->postinc = EXTRACT_FIELD(dc->ir, 10, 10);

    /* Large switch for all insns: first matching pattern wins,
       the final catch-all entry is dec_null.  */
    for (i = 0; i < ARRAY_SIZE(decinfo); i++) {
        if ((dc->opcode & decinfo[i].mask) == decinfo[i].bits) {
            insn_len = decinfo[i].dec(dc);
            break;
        }
    }

#if !defined(CONFIG_USER_ONLY)
    /* Single-stepping ?  */
    if (dc->tb_flags & S_FLAG) {
        int l1;

        l1 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_NE, cpu_PR[PR_SPC], dc->pc, l1);
        /* We treat SPC as a break with an odd trap vector.  */
        cris_evaluate_flags (dc);
        t_gen_mov_env_TN(trap_vector, tcg_const_tl(3));
        tcg_gen_movi_tl(env_pc, dc->pc + insn_len);
        tcg_gen_movi_tl(cpu_PR[PR_SPC], dc->pc + insn_len);
        t_gen_raise_exception(EXCP_BREAK);
        gen_set_label(l1);
    }
#endif
    return insn_len;
}
3082 static void check_breakpoint(CPUState *env, DisasContext *dc)
3084 CPUBreakpoint *bp;
3086 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
3087 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
3088 if (bp->pc == dc->pc) {
3089 cris_evaluate_flags (dc);
3090 tcg_gen_movi_tl(env_pc, dc->pc);
3091 t_gen_raise_exception(EXCP_DEBUG);
3092 dc->is_jmp = DISAS_UPDATE;
3100 * Delay slots on QEMU/CRIS.
3102 * If an exception hits on a delayslot, the core will let ERP (the Exception
 * Return Pointer) point to the branch (the previous) insn and set the lsb
 * to give SW a hint that the exception actually hit on the dslot.
3106 * CRIS expects all PC addresses to be 16-bit aligned. The lsb is ignored by
 * the core and any jmp to an odd address will mask off that lsb. It is
3108 * simply there to let sw know there was an exception on a dslot.
3110 * When the software returns from an exception, the branch will re-execute.
3111 * On QEMU care needs to be taken when a branch+delayslot sequence is broken
 * and the branch and delayslot don't share pages.
 * The TB containing the branch insn will set up env->btarget and evaluate
3115 * env->btaken. When the translation loop exits we will note that the branch
3116 * sequence is broken and let env->dslot be the size of the branch insn (those
3117 * vary in length).
 * The TB containing the delayslot will have the PC of its real insn (i.e no lsb
3120 * set). It will also expect to have env->dslot setup with the size of the
3121 * delay slot so that env->pc - env->dslot point to the branch insn. This TB
3122 * will execute the dslot and take the branch, either to btarget or just one
3123 * insn ahead.
3125 * When exceptions occur, we check for env->dslot in do_interrupt to detect
3126 * broken branch sequences and setup $erp accordingly (i.e let it point to the
3127 * branch and set lsb). Then env->dslot gets cleared so that the exception
3128 * handler can enter. When returning from exceptions (jump $erp) the lsb gets
3129 * masked off and we will reexecute the branch insn.
/* generate intermediate code for basic block 'tb'.
 *
 * Translates target insns starting at tb->pc into TCG ops until the
 * block ends (jump, cpu-state change, page boundary, op-buffer full,
 * or insn budget reached).  With search_pc set, it also fills in the
 * gen_opc_* side tables used to map host PCs back to guest PCs.
 */
static void
gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
                               int search_pc)
{
    uint16_t *gen_opc_end;
    uint32_t pc_start;
    unsigned int insn_len;
    int j, lj;
    struct DisasContext ctx;
    struct DisasContext *dc = &ctx;
    uint32_t next_page_start;
    target_ulong npc;
    int num_insns;
    int max_insns;

    qemu_log_try_set_file(stderr);

    /* Odd PC indicates that branch is rexecuting due to exception in the
     * delayslot, like in real hw.
     */
    pc_start = tb->pc & ~1;
    dc->env = env;
    dc->tb = tb;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Reset the per-block condition-code tracking state.  */
    dc->is_jmp = DISAS_NEXT;
    dc->ppc = pc_start;
    dc->pc = pc_start;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->flags_uptodate = 1;
    dc->flagx_known = 1;
    dc->flags_x = tb->flags & X_FLAG;
    dc->cc_x_uptodate = 0;
    dc->cc_mask = 0;
    dc->update_cc = 0;

    cris_update_cc_op(dc, CC_OP_FLAGS, 4);
    dc->cc_size_uptodate = -1;

    /* Decode TB flags.  The low 3 bits carry the pending-delayslot
       count from the previous block.  */
    dc->tb_flags = tb->flags & (S_FLAG | P_FLAG | U_FLAG | X_FLAG);
    dc->delayed_branch = !!(tb->flags & 7);
    if (dc->delayed_branch)
        dc->jmp = JMP_INDIRECT;
    else
        dc->jmp = JMP_NOJMP;

    dc->cpustate_changed = 0;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log(
            "srch=%d pc=%x %x flg=%llx bt=%x ds=%u ccs=%x\n"
            "pid=%x usp=%x\n"
            "%x.%x.%x.%x\n"
            "%x.%x.%x.%x\n"
            "%x.%x.%x.%x\n"
            "%x.%x.%x.%x\n",
            search_pc, dc->pc, dc->ppc,
            (unsigned long long)tb->flags,
            env->btarget, (unsigned)tb->flags & 7,
            env->pregs[PR_CCS],
            env->pregs[PR_PID], env->pregs[PR_USP],
            env->regs[0], env->regs[1], env->regs[2], env->regs[3],
            env->regs[4], env->regs[5], env->regs[6], env->regs[7],
            env->regs[8], env->regs[9],
            env->regs[10], env->regs[11],
            env->regs[12], env->regs[13],
            env->regs[14], env->regs[15]);
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
    }

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    do {
        check_breakpoint(env, dc);

        if (search_pc) {
            /* Record guest PC / icount for every generated op slot.  */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            if (dc->delayed_branch == 1)
                gen_opc_pc[lj] = dc->ppc | 1;   /* odd PC marks a dslot */
            else
                gen_opc_pc[lj] = dc->pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }

        /* Pretty disas.  */
        LOG_DIS("%8.8x:\t", dc->pc);

        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        dc->clear_x = 1;

        insn_len = cris_decoder(dc);
        dc->ppc = dc->pc;
        dc->pc += insn_len;
        if (dc->clear_x)
            cris_clear_x_flag(dc);

        num_insns++;
        /* Check for delayed branches here. If we do it before
           actually generating any host code, the simulator will just
           loop doing nothing for on this program location.  */
        if (dc->delayed_branch) {
            dc->delayed_branch--;
            if (dc->delayed_branch == 0) {
                if (tb->flags & 7)
                    t_gen_mov_env_TN(dslot,
                             tcg_const_tl(0));
                if (dc->jmp == JMP_DIRECT) {
                    dc->is_jmp = DISAS_NEXT;
                } else {
                    t_gen_cc_jmp(env_btarget,
                             tcg_const_tl(dc->pc));
                    dc->is_jmp = DISAS_JUMP;
                }
                break;
            }
        }

        /* If we are rexecuting a branch due to exceptions on
           delay slots dont break.  */
        if (!(tb->pc & 1) && env->singlestep_enabled)
            break;
    } while (!dc->is_jmp && !dc->cpustate_changed
         && gen_opc_ptr < gen_opc_end
         && !singlestep
         && (dc->pc < next_page_start)
         && num_insns < max_insns);

    npc = dc->pc;
    if (dc->jmp == JMP_DIRECT && !dc->delayed_branch)
        npc = dc->jmp_pc;

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Force an update if the per-tb cpu state has changed.  */
    if (dc->is_jmp == DISAS_NEXT
        && (dc->cpustate_changed || !dc->flagx_known
        || (dc->flags_x != (tb->flags & X_FLAG)))) {
        dc->is_jmp = DISAS_UPDATE;
        tcg_gen_movi_tl(env_pc, npc);
    }
    /* Broken branch+delayslot sequence.  */
    if (dc->delayed_branch == 1) {
        /* Set env->dslot to the size of the branch insn.  */
        t_gen_mov_env_TN(dslot, tcg_const_tl(dc->pc - dc->ppc));
        cris_store_direct_jmp(dc);
    }

    cris_evaluate_flags (dc);

    if (unlikely(env->singlestep_enabled)) {
        if (dc->is_jmp == DISAS_NEXT)
            tcg_gen_movi_tl(env_pc, npc);
        t_gen_raise_exception(EXCP_DEBUG);
    } else {
        switch(dc->is_jmp) {
            case DISAS_NEXT:
                gen_goto_tb(dc, 1, npc);
                break;
            default:
            case DISAS_JUMP:
            case DISAS_UPDATE:
                /* indicate that the hash table must be used
                   to find the next TB */
                tcg_gen_exit_tb(0);
                break;
            case DISAS_SWI:
            case DISAS_TB_JUMP:
                /* nothing more to generate */
                break;
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = dc->pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
#if !DISAS_CRIS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        log_target_disas(pc_start, dc->pc - pc_start, 0);
        qemu_log("\nisize=%d osize=%zd\n",
            dc->pc - pc_start, gen_opc_ptr - gen_opc_buf);
    }
#endif
#endif
}
/* Normal translation entry point (no PC-search side tables).  */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
/* Retranslation entry point: also fills the gen_opc_* tables so a
   host PC can be mapped back to a guest PC.  */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
3356 void cpu_dump_state (CPUState *env, FILE *f,
3357 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
3358 int flags)
3360 int i;
3361 uint32_t srs;
3363 if (!env || !f)
3364 return;
3366 cpu_fprintf(f, "PC=%x CCS=%x btaken=%d btarget=%x\n"
3367 "cc_op=%d cc_src=%d cc_dest=%d cc_result=%x cc_mask=%x\n",
3368 env->pc, env->pregs[PR_CCS], env->btaken, env->btarget,
3369 env->cc_op,
3370 env->cc_src, env->cc_dest, env->cc_result, env->cc_mask);
3373 for (i = 0; i < 16; i++) {
3374 cpu_fprintf(f, "r%2.2d=%8.8x ", i, env->regs[i]);
3375 if ((i + 1) % 4 == 0)
3376 cpu_fprintf(f, "\n");
3378 cpu_fprintf(f, "\nspecial regs:\n");
3379 for (i = 0; i < 16; i++) {
3380 cpu_fprintf(f, "p%2.2d=%8.8x ", i, env->pregs[i]);
3381 if ((i + 1) % 4 == 0)
3382 cpu_fprintf(f, "\n");
3384 srs = env->pregs[PR_SRS];
3385 cpu_fprintf(f, "\nsupport function regs bank %x:\n", srs);
3386 if (srs < 256) {
3387 for (i = 0; i < 16; i++) {
3388 cpu_fprintf(f, "s%2.2d=%8.8x ",
3389 i, env->sregs[srs][i]);
3390 if ((i + 1) % 4 == 0)
3391 cpu_fprintf(f, "\n");
3394 cpu_fprintf(f, "\n\n");
/* Allocate and initialize a CRIS CPU.  On first call, also registers
   the TCG globals that mirror fields of CPUState; subsequent calls
   reuse the already-initialized translator state.  */
CPUCRISState *cpu_cris_init (const char *cpu_model)
{
    CPUCRISState *env;
    static int tcg_initialized = 0;   /* TCG globals registered once */
    int i;

    env = qemu_mallocz(sizeof(CPUCRISState));

    cpu_exec_init(env);
    cpu_reset(env);
    qemu_init_vcpu(env);

    if (tcg_initialized)
        return env;

    tcg_initialized = 1;

    /* Map the condition-code tracking fields to TCG globals.  */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cc_x = tcg_global_mem_new(TCG_AREG0,
                  offsetof(CPUState, cc_x), "cc_x");
    cc_src = tcg_global_mem_new(TCG_AREG0,
                    offsetof(CPUState, cc_src), "cc_src");
    cc_dest = tcg_global_mem_new(TCG_AREG0,
                     offsetof(CPUState, cc_dest),
                     "cc_dest");
    cc_result = tcg_global_mem_new(TCG_AREG0,
                       offsetof(CPUState, cc_result),
                       "cc_result");
    cc_op = tcg_global_mem_new(TCG_AREG0,
                   offsetof(CPUState, cc_op), "cc_op");
    cc_size = tcg_global_mem_new(TCG_AREG0,
                     offsetof(CPUState, cc_size),
                     "cc_size");
    cc_mask = tcg_global_mem_new(TCG_AREG0,
                     offsetof(CPUState, cc_mask),
                     "cc_mask");

    /* PC and branch state.  */
    env_pc = tcg_global_mem_new(TCG_AREG0,
                    offsetof(CPUState, pc),
                    "pc");
    env_btarget = tcg_global_mem_new(TCG_AREG0,
                     offsetof(CPUState, btarget),
                     "btarget");
    env_btaken = tcg_global_mem_new(TCG_AREG0,
                     offsetof(CPUState, btaken),
                     "btaken");
    /* General and special register files.  */
    for (i = 0; i < 16; i++) {
        cpu_R[i] = tcg_global_mem_new(TCG_AREG0,
                          offsetof(CPUState, regs[i]),
                          regnames[i]);
    }
    for (i = 0; i < 16; i++) {
        cpu_PR[i] = tcg_global_mem_new(TCG_AREG0,
                           offsetof(CPUState, pregs[i]),
                           pregnames[i]);
    }

#define GEN_HELPER 2
#include "helper.h"

    return env;
}
/* Reset the CPU to its architectural power-on state.  */
void cpu_reset (CPUCRISState *env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }

    /* Clear everything up to (but not including) the breakpoint list,
       which must survive reset.  */
    memset(env, 0, offsetof(CPUCRISState, breakpoints));
    tlb_flush(env, 1);

    env->pregs[PR_VR] = 32;   /* version register: CRISv32 */
#if defined(CONFIG_USER_ONLY)
    /* start in user mode with interrupts enabled.  */
    env->pregs[PR_CCS] |= U_FLAG | I_FLAG;
#else
    cris_mmu_init(env);
    env->pregs[PR_CCS] = 0;
#endif
}
/* Restore env->pc from the gen_opc_pc side table after a retranslation
   triggered by an exception (pc_pos was found by the search-pc pass).  */
void gen_pc_load(CPUState *env, struct TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}