Revert "block: prevent multiwrite_merge from creating too large iovecs"
[qemu/aliguori-queue.git] target-cris/translate.c
1 /*
2 * CRIS emulation for qemu: main translation routines.
4 * Copyright (c) 2008 AXIS Communications AB
5 * Written by Edgar E. Iglesias.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
22 * FIXME:
23 * The condition code translation is in need of attention.
26 #include <stdarg.h>
27 #include <stdlib.h>
28 #include <stdio.h>
29 #include <string.h>
30 #include <inttypes.h>
32 #include "cpu.h"
33 #include "exec-all.h"
34 #include "disas.h"
35 #include "tcg-op.h"
36 #include "helper.h"
37 #include "mmu.h"
38 #include "crisv32-decode.h"
39 #include "qemu-common.h"
41 #define GEN_HELPER 1
42 #include "helper.h"
44 #define DISAS_CRIS 0
45 #if DISAS_CRIS
46 # define LOG_DIS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
47 #else
48 # define LOG_DIS(...) do { } while (0)
49 #endif
51 #define D(x)
52 #define BUG() (gen_BUG(dc, __FILE__, __LINE__))
53 #define BUG_ON(x) ({if (x) BUG();})
55 #define DISAS_SWI 5
57 /* Used by the decoder. */
58 #define EXTRACT_FIELD(src, start, end) \
59 (((src) >> start) & ((1 << (end - start + 1)) - 1))
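/* For example, EXTRACT_FIELD(0x1234, 4, 7) selects bits 7..4 and yields 0x3. */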
61 #define CC_MASK_NZ 0xc
62 #define CC_MASK_NZV 0xe
63 #define CC_MASK_NZVC 0xf
64 #define CC_MASK_RNZV 0x10e
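/* The mask bits follow the CCS flag layout used throughout this file:
   C is bit 0, V bit 1, Z bit 2, N bit 3 and X bit 4; CC_MASK_RNZV also
   includes the R flag at bit 8. */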
66 static TCGv_ptr cpu_env;
67 static TCGv cpu_R[16];
68 static TCGv cpu_PR[16];
69 static TCGv cc_x;
70 static TCGv cc_src;
71 static TCGv cc_dest;
72 static TCGv cc_result;
73 static TCGv cc_op;
74 static TCGv cc_size;
75 static TCGv cc_mask;
77 static TCGv env_btaken;
78 static TCGv env_btarget;
79 static TCGv env_pc;
81 #include "gen-icount.h"
83 /* This is the state at translation time. */
84 typedef struct DisasContext {
85 CPUState *env;
86 target_ulong pc, ppc;
88 /* Decoder. */
89 uint32_t ir;
90 uint32_t opcode;
91 unsigned int op1;
92 unsigned int op2;
93 unsigned int zsize, zzsize;
94 unsigned int mode;
95 unsigned int postinc;
97 int update_cc;
98 int cc_op;
99 int cc_size;
100 uint32_t cc_mask;
102 int cc_size_uptodate; /* -1 invalid or last written value. */
104 int cc_x_uptodate; /* 1 - ccs, 2 - known | X_FLAG. 0 not uptodate. */
105         int flags_uptodate; /* Whether or not $ccs is uptodate.  */
106         int flagx_known; /* Whether or not flags_x has the x flag known at
107 translation time. */
108 int flags_x;
110 int clear_x; /* Clear x after this insn? */
111 int cpustate_changed;
112 unsigned int tb_flags; /* tb dependent flags. */
113 int is_jmp;
115 #define JMP_NOJMP 0
116 #define JMP_DIRECT 1
117 #define JMP_INDIRECT 2
118 int jmp; /* 0=nojmp, 1=direct, 2=indirect. */
119 uint32_t jmp_pc;
121 int delayed_branch;
123 struct TranslationBlock *tb;
124 int singlestep_enabled;
125 } DisasContext;
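/* Condition codes are evaluated lazily: cc_op, cc_size and cc_mask, together
   with the cc_src/cc_dest/cc_result globals, record the last flag-setting
   operation, and cris_evaluate_flags() only materializes $ccs from them when
   the flags are actually consumed. */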
127 static void gen_BUG(DisasContext *dc, const char *file, int line)
129 printf ("BUG: pc=%x %s %d\n", dc->pc, file, line);
130 qemu_log("BUG: pc=%x %s %d\n", dc->pc, file, line);
131 cpu_abort(dc->env, "%s:%d\n", file, line);
134 static const char *regnames[] =
136 "$r0", "$r1", "$r2", "$r3",
137 "$r4", "$r5", "$r6", "$r7",
138 "$r8", "$r9", "$r10", "$r11",
139 "$r12", "$r13", "$sp", "$acr",
141 static const char *pregnames[] =
143 "$bz", "$vr", "$pid", "$srs",
144 "$wz", "$exs", "$eda", "$mof",
145 "$dz", "$ebp", "$erp", "$srp",
146 "$nrp", "$ccs", "$usp", "$spc",
149 /* We need this table to handle preg-moves with implicit width. */
150 static int preg_sizes[] = {
151 1, /* bz. */
152 1, /* vr. */
153 4, /* pid. */
154 1, /* srs. */
155 2, /* wz. */
156 4, 4, 4,
157 4, 4, 4, 4,
158 4, 4, 4, 4,
161 #define t_gen_mov_TN_env(tn, member) \
162 _t_gen_mov_TN_env((tn), offsetof(CPUState, member))
163 #define t_gen_mov_env_TN(member, tn) \
164 _t_gen_mov_env_TN(offsetof(CPUState, member), (tn))
166 static inline void t_gen_mov_TN_reg(TCGv tn, int r)
168 if (r < 0 || r > 15)
169 fprintf(stderr, "wrong register read $r%d\n", r);
170 tcg_gen_mov_tl(tn, cpu_R[r]);
172 static inline void t_gen_mov_reg_TN(int r, TCGv tn)
174 if (r < 0 || r > 15)
175 fprintf(stderr, "wrong register write $r%d\n", r);
176 tcg_gen_mov_tl(cpu_R[r], tn);
179 static inline void _t_gen_mov_TN_env(TCGv tn, int offset)
181 if (offset > sizeof (CPUState))
182 fprintf(stderr, "wrong load from env from off=%d\n", offset);
183 tcg_gen_ld_tl(tn, cpu_env, offset);
185 static inline void _t_gen_mov_env_TN(int offset, TCGv tn)
187 if (offset > sizeof (CPUState))
188 fprintf(stderr, "wrong store to env at off=%d\n", offset);
189 tcg_gen_st_tl(tn, cpu_env, offset);
192 static inline void t_gen_mov_TN_preg(TCGv tn, int r)
194 if (r < 0 || r > 15)
195 fprintf(stderr, "wrong register read $p%d\n", r);
196 if (r == PR_BZ || r == PR_WZ || r == PR_DZ)
197                 tcg_gen_movi_tl(tn, 0);
198 else if (r == PR_VR)
199                 tcg_gen_movi_tl(tn, 32);
200 else
201 tcg_gen_mov_tl(tn, cpu_PR[r]);
203 static inline void t_gen_mov_preg_TN(DisasContext *dc, int r, TCGv tn)
205 if (r < 0 || r > 15)
206 fprintf(stderr, "wrong register write $p%d\n", r);
207 if (r == PR_BZ || r == PR_WZ || r == PR_DZ)
208 return;
209 else if (r == PR_SRS)
210 tcg_gen_andi_tl(cpu_PR[r], tn, 3);
211 else {
212 if (r == PR_PID)
213 gen_helper_tlb_flush_pid(tn);
214 if (dc->tb_flags & S_FLAG && r == PR_SPC)
215 gen_helper_spc_write(tn);
216 else if (r == PR_CCS)
217 dc->cpustate_changed = 1;
218 tcg_gen_mov_tl(cpu_PR[r], tn);
222 static inline void t_gen_raise_exception(uint32_t index)
224 TCGv_i32 tmp = tcg_const_i32(index);
225 gen_helper_raise_exception(tmp);
226 tcg_temp_free_i32(tmp);
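/* The shift helpers below clamp shift counts above 31 without branching:
   (31 - b) >> 31 (arithmetic shift) is all ones exactly when b > 31, and that
   mask is used to zero the lsl/lsr result (asr ORs it in instead). */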
229 static void t_gen_lsl(TCGv d, TCGv a, TCGv b)
231 TCGv t0, t_31;
233 t0 = tcg_temp_new();
234 t_31 = tcg_const_tl(31);
235 tcg_gen_shl_tl(d, a, b);
237 tcg_gen_sub_tl(t0, t_31, b);
238 tcg_gen_sar_tl(t0, t0, t_31);
239 tcg_gen_and_tl(t0, t0, d);
240 tcg_gen_xor_tl(d, d, t0);
241 tcg_temp_free(t0);
242 tcg_temp_free(t_31);
245 static void t_gen_lsr(TCGv d, TCGv a, TCGv b)
247 TCGv t0, t_31;
249 t0 = tcg_temp_new();
250 t_31 = tcg_temp_new();
251 tcg_gen_shr_tl(d, a, b);
253 tcg_gen_movi_tl(t_31, 31);
254 tcg_gen_sub_tl(t0, t_31, b);
255 tcg_gen_sar_tl(t0, t0, t_31);
256 tcg_gen_and_tl(t0, t0, d);
257 tcg_gen_xor_tl(d, d, t0);
258 tcg_temp_free(t0);
259 tcg_temp_free(t_31);
262 static void t_gen_asr(TCGv d, TCGv a, TCGv b)
264 TCGv t0, t_31;
266 t0 = tcg_temp_new();
267 t_31 = tcg_temp_new();
268 tcg_gen_sar_tl(d, a, b);
270 tcg_gen_movi_tl(t_31, 31);
271 tcg_gen_sub_tl(t0, t_31, b);
272 tcg_gen_sar_tl(t0, t0, t_31);
273 tcg_gen_or_tl(d, d, t0);
274 tcg_temp_free(t0);
275 tcg_temp_free(t_31);
278 /* 64-bit signed mul, lower result in d and upper in d2. */
279 static void t_gen_muls(TCGv d, TCGv d2, TCGv a, TCGv b)
281 TCGv_i64 t0, t1;
283 t0 = tcg_temp_new_i64();
284 t1 = tcg_temp_new_i64();
286 tcg_gen_ext_i32_i64(t0, a);
287 tcg_gen_ext_i32_i64(t1, b);
288 tcg_gen_mul_i64(t0, t0, t1);
290 tcg_gen_trunc_i64_i32(d, t0);
291 tcg_gen_shri_i64(t0, t0, 32);
292 tcg_gen_trunc_i64_i32(d2, t0);
294 tcg_temp_free_i64(t0);
295 tcg_temp_free_i64(t1);
298 /* 64-bit unsigned mul, lower result in d and upper in d2.  */
299 static void t_gen_mulu(TCGv d, TCGv d2, TCGv a, TCGv b)
301 TCGv_i64 t0, t1;
303 t0 = tcg_temp_new_i64();
304 t1 = tcg_temp_new_i64();
306 tcg_gen_extu_i32_i64(t0, a);
307 tcg_gen_extu_i32_i64(t1, b);
308 tcg_gen_mul_i64(t0, t0, t1);
310 tcg_gen_trunc_i64_i32(d, t0);
311 tcg_gen_shri_i64(t0, t0, 32);
312 tcg_gen_trunc_i64_i32(d2, t0);
314 tcg_temp_free_i64(t0);
315 tcg_temp_free_i64(t1);
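/* dstep is the CRIS division-step instruction; guest code typically performs
   a division by repeating it once per quotient bit. */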
318 static void t_gen_cris_dstep(TCGv d, TCGv a, TCGv b)
320 int l1;
322 l1 = gen_new_label();
325 * d <<= 1
326 * if (d >= s)
327 * d -= s;
329 tcg_gen_shli_tl(d, a, 1);
330 tcg_gen_brcond_tl(TCG_COND_LTU, d, b, l1);
331 tcg_gen_sub_tl(d, d, b);
332 gen_set_label(l1);
335 /* Extended arithmetic on CRIS.  */
336 static inline void t_gen_add_flag(TCGv d, int flag)
338 TCGv c;
340 c = tcg_temp_new();
341 t_gen_mov_TN_preg(c, PR_CCS);
342 /* Propagate carry into d. */
343 tcg_gen_andi_tl(c, c, 1 << flag);
344 if (flag)
345 tcg_gen_shri_tl(c, c, flag);
346 tcg_gen_add_tl(d, d, c);
347 tcg_temp_free(c);
350 static inline void t_gen_addx_carry(DisasContext *dc, TCGv d)
352 if (dc->flagx_known) {
353 if (dc->flags_x) {
354 TCGv c;
356 c = tcg_temp_new();
357 t_gen_mov_TN_preg(c, PR_CCS);
358 /* C flag is already at bit 0. */
359 tcg_gen_andi_tl(c, c, C_FLAG);
360 tcg_gen_add_tl(d, d, c);
361 tcg_temp_free(c);
363 } else {
364 TCGv x, c;
366 x = tcg_temp_new();
367 c = tcg_temp_new();
368 t_gen_mov_TN_preg(x, PR_CCS);
369 tcg_gen_mov_tl(c, x);
371 /* Propagate carry into d if X is set. Branch free. */
372 tcg_gen_andi_tl(c, c, C_FLAG);
373 tcg_gen_andi_tl(x, x, X_FLAG);
374 tcg_gen_shri_tl(x, x, 4);
376 tcg_gen_and_tl(x, x, c);
377 tcg_gen_add_tl(d, d, x);
378 tcg_temp_free(x);
379 tcg_temp_free(c);
383 static inline void t_gen_subx_carry(DisasContext *dc, TCGv d)
385 if (dc->flagx_known) {
386 if (dc->flags_x) {
387 TCGv c;
389 c = tcg_temp_new();
390 t_gen_mov_TN_preg(c, PR_CCS);
391 /* C flag is already at bit 0. */
392 tcg_gen_andi_tl(c, c, C_FLAG);
393 tcg_gen_sub_tl(d, d, c);
394 tcg_temp_free(c);
396 } else {
397 TCGv x, c;
399 x = tcg_temp_new();
400 c = tcg_temp_new();
401 t_gen_mov_TN_preg(x, PR_CCS);
402 tcg_gen_mov_tl(c, x);
404 /* Propagate carry into d if X is set. Branch free. */
405 tcg_gen_andi_tl(c, c, C_FLAG);
406 tcg_gen_andi_tl(x, x, X_FLAG);
407 tcg_gen_shri_tl(x, x, 4);
409 tcg_gen_and_tl(x, x, c);
410 tcg_gen_sub_tl(d, d, x);
411 tcg_temp_free(x);
412 tcg_temp_free(c);
416 /* Swap the two bytes within each half word of the s operand.
417 T0 = ((T0 << 8) & 0xff00ff00) | ((T0 >> 8) & 0x00ff00ff) */
418 static inline void t_gen_swapb(TCGv d, TCGv s)
420 TCGv t, org_s;
422 t = tcg_temp_new();
423 org_s = tcg_temp_new();
425 /* d and s may refer to the same object. */
426 tcg_gen_mov_tl(org_s, s);
427 tcg_gen_shli_tl(t, org_s, 8);
428 tcg_gen_andi_tl(d, t, 0xff00ff00);
429 tcg_gen_shri_tl(t, org_s, 8);
430 tcg_gen_andi_tl(t, t, 0x00ff00ff);
431 tcg_gen_or_tl(d, d, t);
432 tcg_temp_free(t);
433 tcg_temp_free(org_s);
436 /* Swap the halfwords of the s operand. */
437 static inline void t_gen_swapw(TCGv d, TCGv s)
439 TCGv t;
440         /* d and s may refer to the same object.  */
441 t = tcg_temp_new();
442 tcg_gen_mov_tl(t, s);
443 tcg_gen_shli_tl(d, t, 16);
444 tcg_gen_shri_tl(t, t, 16);
445 tcg_gen_or_tl(d, d, t);
446 tcg_temp_free(t);
449 /* Reverse the bits within each byte.
450 T0 = (((T0 << 7) & 0x80808080) |
451 ((T0 << 5) & 0x40404040) |
452 ((T0 << 3) & 0x20202020) |
453 ((T0 << 1) & 0x10101010) |
454 ((T0 >> 1) & 0x08080808) |
455 ((T0 >> 3) & 0x04040404) |
456 ((T0 >> 5) & 0x02020202) |
457 ((T0 >> 7) & 0x01010101));
459 static inline void t_gen_swapr(TCGv d, TCGv s)
461 struct {
462 int shift; /* LSL when positive, LSR when negative. */
463 uint32_t mask;
464 } bitrev [] = {
465 {7, 0x80808080},
466 {5, 0x40404040},
467 {3, 0x20202020},
468 {1, 0x10101010},
469 {-1, 0x08080808},
470 {-3, 0x04040404},
471 {-5, 0x02020202},
472 {-7, 0x01010101}
474 int i;
475 TCGv t, org_s;
477         /* d and s may refer to the same object.  */
478 t = tcg_temp_new();
479 org_s = tcg_temp_new();
480 tcg_gen_mov_tl(org_s, s);
482 tcg_gen_shli_tl(t, org_s, bitrev[0].shift);
483 tcg_gen_andi_tl(d, t, bitrev[0].mask);
484 for (i = 1; i < ARRAY_SIZE(bitrev); i++) {
485 if (bitrev[i].shift >= 0) {
486 tcg_gen_shli_tl(t, org_s, bitrev[i].shift);
487 } else {
488 tcg_gen_shri_tl(t, org_s, -bitrev[i].shift);
490 tcg_gen_andi_tl(t, t, bitrev[i].mask);
491 tcg_gen_or_tl(d, d, t);
493 tcg_temp_free(t);
494 tcg_temp_free(org_s);
497 static void t_gen_cc_jmp(TCGv pc_true, TCGv pc_false)
499 TCGv btaken;
500 int l1;
502 l1 = gen_new_label();
503 btaken = tcg_temp_new();
505 /* Conditional jmp. */
506 tcg_gen_mov_tl(btaken, env_btaken);
507 tcg_gen_mov_tl(env_pc, pc_false);
508 tcg_gen_brcondi_tl(TCG_COND_EQ, btaken, 0, l1);
509 tcg_gen_mov_tl(env_pc, pc_true);
510 gen_set_label(l1);
512 tcg_temp_free(btaken);
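/* Chain directly to the destination TB when it lies on the same page as this
   TB: tcg_gen_exit_tb((long)tb + n) lets the generated jump be patched to go
   straight to the next TB. Cross-page targets exit with 0 so the new pc is
   looked up at run time. */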
515 static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
517 TranslationBlock *tb;
518 tb = dc->tb;
519 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
520 tcg_gen_goto_tb(n);
521 tcg_gen_movi_tl(env_pc, dest);
522 tcg_gen_exit_tb((long)tb + n);
523 } else {
524 tcg_gen_movi_tl(env_pc, dest);
525 tcg_gen_exit_tb(0);
529 /* Sign extend at translation time. */
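/* width is the bit position of the sign bit, e.g. sign_extend(0x3f, 5) == -1
   for the 6-bit moveq immediate. */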
530 static int sign_extend(unsigned int val, unsigned int width)
532 int sval;
534 /* LSL. */
535 val <<= 31 - width;
536 sval = val;
537 /* ASR. */
538 sval >>= 31 - width;
539 return sval;
542 static inline void cris_clear_x_flag(DisasContext *dc)
544 if (dc->flagx_known && dc->flags_x)
545 dc->flags_uptodate = 0;
547 dc->flagx_known = 1;
548 dc->flags_x = 0;
551 static void cris_flush_cc_state(DisasContext *dc)
553 if (dc->cc_size_uptodate != dc->cc_size) {
554 tcg_gen_movi_tl(cc_size, dc->cc_size);
555 dc->cc_size_uptodate = dc->cc_size;
557 tcg_gen_movi_tl(cc_op, dc->cc_op);
558 tcg_gen_movi_tl(cc_mask, dc->cc_mask);
561 static void cris_evaluate_flags(DisasContext *dc)
563 if (dc->flags_uptodate)
564 return;
566 cris_flush_cc_state(dc);
568 switch (dc->cc_op)
570 case CC_OP_MCP:
571 gen_helper_evaluate_flags_mcp(cpu_PR[PR_CCS],
572 cpu_PR[PR_CCS], cc_src,
573 cc_dest, cc_result);
574 break;
575 case CC_OP_MULS:
576 gen_helper_evaluate_flags_muls(cpu_PR[PR_CCS],
577 cpu_PR[PR_CCS], cc_result,
578 cpu_PR[PR_MOF]);
579 break;
580 case CC_OP_MULU:
581 gen_helper_evaluate_flags_mulu(cpu_PR[PR_CCS],
582 cpu_PR[PR_CCS], cc_result,
583 cpu_PR[PR_MOF]);
584 break;
585 case CC_OP_MOVE:
586 case CC_OP_AND:
587 case CC_OP_OR:
588 case CC_OP_XOR:
589 case CC_OP_ASR:
590 case CC_OP_LSR:
591 case CC_OP_LSL:
592 switch (dc->cc_size)
594 case 4:
595 gen_helper_evaluate_flags_move_4(cpu_PR[PR_CCS],
596 cpu_PR[PR_CCS], cc_result);
597 break;
598 case 2:
599 gen_helper_evaluate_flags_move_2(cpu_PR[PR_CCS],
600 cpu_PR[PR_CCS], cc_result);
601 break;
602 default:
603 gen_helper_evaluate_flags();
604 break;
606 break;
607 case CC_OP_FLAGS:
608                 /* Flags are already live in $ccs; nothing to do.  */
609 break;
610 case CC_OP_SUB:
611 case CC_OP_CMP:
612 if (dc->cc_size == 4)
613 gen_helper_evaluate_flags_sub_4(cpu_PR[PR_CCS],
614 cpu_PR[PR_CCS], cc_src, cc_dest, cc_result);
615 else
616 gen_helper_evaluate_flags();
618 break;
619 default:
620 switch (dc->cc_size)
622 case 4:
623 gen_helper_evaluate_flags_alu_4(cpu_PR[PR_CCS],
624 cpu_PR[PR_CCS], cc_src, cc_dest, cc_result);
625 break;
626 default:
627 gen_helper_evaluate_flags();
628 break;
630 break;
633 if (dc->flagx_known) {
634 if (dc->flags_x)
635 tcg_gen_ori_tl(cpu_PR[PR_CCS],
636 cpu_PR[PR_CCS], X_FLAG);
637 else
638 tcg_gen_andi_tl(cpu_PR[PR_CCS],
639 cpu_PR[PR_CCS], ~X_FLAG);
641 dc->flags_uptodate = 1;
644 static void cris_cc_mask(DisasContext *dc, unsigned int mask)
646 uint32_t ovl;
648 if (!mask) {
649 dc->update_cc = 0;
650 return;
653 /* Check if we need to evaluate the condition codes due to
654 CC overlaying. */
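/* ovl ends up holding the flags that the previous insn was computing (old
   cc_mask) but that this insn will not update; those must be folded into
   $ccs before the lazy cc state is overwritten. */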
655 ovl = (dc->cc_mask ^ mask) & ~mask;
656 if (ovl) {
657                 /* TODO: optimize this case. It triggers all the time.  */
658 cris_evaluate_flags (dc);
660 dc->cc_mask = mask;
661 dc->update_cc = 1;
664 static void cris_update_cc_op(DisasContext *dc, int op, int size)
666 dc->cc_op = op;
667 dc->cc_size = size;
668 dc->flags_uptodate = 0;
671 static inline void cris_update_cc_x(DisasContext *dc)
673 /* Save the x flag state at the time of the cc snapshot. */
674 if (dc->flagx_known) {
675 if (dc->cc_x_uptodate == (2 | dc->flags_x))
676 return;
677 tcg_gen_movi_tl(cc_x, dc->flags_x);
678 dc->cc_x_uptodate = 2 | dc->flags_x;
680 else {
681 tcg_gen_andi_tl(cc_x, cpu_PR[PR_CCS], X_FLAG);
682 dc->cc_x_uptodate = 1;
686 /* Update cc prior to executing ALU op. Needs source operands untouched. */
687 static void cris_pre_alu_update_cc(DisasContext *dc, int op,
688 TCGv dst, TCGv src, int size)
690 if (dc->update_cc) {
691 cris_update_cc_op(dc, op, size);
692 tcg_gen_mov_tl(cc_src, src);
694 if (op != CC_OP_MOVE
695 && op != CC_OP_AND
696 && op != CC_OP_OR
697 && op != CC_OP_XOR
698 && op != CC_OP_ASR
699 && op != CC_OP_LSR
700 && op != CC_OP_LSL)
701 tcg_gen_mov_tl(cc_dest, dst);
703 cris_update_cc_x(dc);
707 /* Update cc after executing ALU op. Needs the result.  */
708 static inline void cris_update_result(DisasContext *dc, TCGv res)
710 if (dc->update_cc)
711 tcg_gen_mov_tl(cc_result, res);
714 /* Emit the ALU operation; the write back decision is made by the caller (cris_alu).  */
715 static void cris_alu_op_exec(DisasContext *dc, int op,
716 TCGv dst, TCGv a, TCGv b, int size)
718 /* Emit the ALU insns. */
719 switch (op)
721 case CC_OP_ADD:
722 tcg_gen_add_tl(dst, a, b);
723                         /* Extended arithmetic.  */
724 t_gen_addx_carry(dc, dst);
725 break;
726 case CC_OP_ADDC:
727 tcg_gen_add_tl(dst, a, b);
728 t_gen_add_flag(dst, 0); /* C_FLAG. */
729 break;
730 case CC_OP_MCP:
731 tcg_gen_add_tl(dst, a, b);
732 t_gen_add_flag(dst, 8); /* R_FLAG. */
733 break;
734 case CC_OP_SUB:
735 tcg_gen_sub_tl(dst, a, b);
736                         /* Extended arithmetic.  */
737 t_gen_subx_carry(dc, dst);
738 break;
739 case CC_OP_MOVE:
740 tcg_gen_mov_tl(dst, b);
741 break;
742 case CC_OP_OR:
743 tcg_gen_or_tl(dst, a, b);
744 break;
745 case CC_OP_AND:
746 tcg_gen_and_tl(dst, a, b);
747 break;
748 case CC_OP_XOR:
749 tcg_gen_xor_tl(dst, a, b);
750 break;
751 case CC_OP_LSL:
752 t_gen_lsl(dst, a, b);
753 break;
754 case CC_OP_LSR:
755 t_gen_lsr(dst, a, b);
756 break;
757 case CC_OP_ASR:
758 t_gen_asr(dst, a, b);
759 break;
760 case CC_OP_NEG:
761 tcg_gen_neg_tl(dst, b);
762                         /* Extended arithmetic.  */
763 t_gen_subx_carry(dc, dst);
764 break;
765 case CC_OP_LZ:
766 gen_helper_lz(dst, b);
767 break;
768 case CC_OP_MULS:
769 t_gen_muls(dst, cpu_PR[PR_MOF], a, b);
770 break;
771 case CC_OP_MULU:
772 t_gen_mulu(dst, cpu_PR[PR_MOF], a, b);
773 break;
774 case CC_OP_DSTEP:
775 t_gen_cris_dstep(dst, a, b);
776 break;
777 case CC_OP_BOUND:
779 int l1;
780 l1 = gen_new_label();
781 tcg_gen_mov_tl(dst, a);
782 tcg_gen_brcond_tl(TCG_COND_LEU, a, b, l1);
783 tcg_gen_mov_tl(dst, b);
784 gen_set_label(l1);
786 break;
787 case CC_OP_CMP:
788 tcg_gen_sub_tl(dst, a, b);
789                         /* Extended arithmetic.  */
790 t_gen_subx_carry(dc, dst);
791 break;
792 default:
793 qemu_log("illegal ALU op.\n");
794 BUG();
795 break;
798 if (size == 1)
799 tcg_gen_andi_tl(dst, dst, 0xff);
800 else if (size == 2)
801 tcg_gen_andi_tl(dst, dst, 0xffff);
804 static void cris_alu(DisasContext *dc, int op,
805 TCGv d, TCGv op_a, TCGv op_b, int size)
807 TCGv tmp;
808 int writeback;
810 writeback = 1;
812 if (op == CC_OP_CMP) {
813 tmp = tcg_temp_new();
814 writeback = 0;
815 } else if (size == 4) {
816 tmp = d;
817 writeback = 0;
818 } else
819 tmp = tcg_temp_new();
822 cris_pre_alu_update_cc(dc, op, op_a, op_b, size);
823 cris_alu_op_exec(dc, op, tmp, op_a, op_b, size);
824 cris_update_result(dc, tmp);
826 /* Writeback. */
827 if (writeback) {
828 if (size == 1)
829 tcg_gen_andi_tl(d, d, ~0xff);
830 else
831 tcg_gen_andi_tl(d, d, ~0xffff);
832 tcg_gen_or_tl(d, d, tmp);
834 if (!TCGV_EQUAL(tmp, d))
835 tcg_temp_free(tmp);
838 static int arith_cc(DisasContext *dc)
840 if (dc->update_cc) {
841 switch (dc->cc_op) {
842 case CC_OP_ADDC: return 1;
843 case CC_OP_ADD: return 1;
844 case CC_OP_SUB: return 1;
845 case CC_OP_DSTEP: return 1;
846 case CC_OP_LSL: return 1;
847 case CC_OP_LSR: return 1;
848 case CC_OP_ASR: return 1;
849 case CC_OP_CMP: return 1;
850 case CC_OP_NEG: return 1;
851 case CC_OP_OR: return 1;
852 case CC_OP_AND: return 1;
853 case CC_OP_XOR: return 1;
854 case CC_OP_MULU: return 1;
855 case CC_OP_MULS: return 1;
856 default:
857 return 0;
860 return 0;
863 static void gen_tst_cc (DisasContext *dc, TCGv cc, int cond)
865 int arith_opt, move_opt;
867 /* TODO: optimize more condition codes. */
870          * If the flags are live, we have to look into the bits of CCS.
871 * Otherwise, if we just did an arithmetic operation we try to
872 * evaluate the condition code faster.
874          * When this function is done, cc should be non-zero if the condition
875 * code is true.
877 arith_opt = arith_cc(dc) && !dc->flags_uptodate;
878 move_opt = (dc->cc_op == CC_OP_MOVE);
879 switch (cond) {
880 case CC_EQ:
881 if (arith_opt || move_opt) {
882                         /* If cc_result is zero, cc should be
883                            non-zero, otherwise cc should be zero.  */
884 int l1;
885 l1 = gen_new_label();
886 tcg_gen_movi_tl(cc, 0);
887 tcg_gen_brcondi_tl(TCG_COND_NE, cc_result,
888 0, l1);
889 tcg_gen_movi_tl(cc, 1);
890 gen_set_label(l1);
892 else {
893 cris_evaluate_flags(dc);
894 tcg_gen_andi_tl(cc,
895 cpu_PR[PR_CCS], Z_FLAG);
897 break;
898 case CC_NE:
899 if (arith_opt || move_opt)
900 tcg_gen_mov_tl(cc, cc_result);
901 else {
902 cris_evaluate_flags(dc);
903 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
904 Z_FLAG);
905 tcg_gen_andi_tl(cc, cc, Z_FLAG);
907 break;
908 case CC_CS:
909 cris_evaluate_flags(dc);
910 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], C_FLAG);
911 break;
912 case CC_CC:
913 cris_evaluate_flags(dc);
914 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS], C_FLAG);
915 tcg_gen_andi_tl(cc, cc, C_FLAG);
916 break;
917 case CC_VS:
918 cris_evaluate_flags(dc);
919 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], V_FLAG);
920 break;
921 case CC_VC:
922 cris_evaluate_flags(dc);
923 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
924 V_FLAG);
925 tcg_gen_andi_tl(cc, cc, V_FLAG);
926 break;
927 case CC_PL:
928 if (arith_opt || move_opt) {
929 int bits = 31;
931 if (dc->cc_size == 1)
932 bits = 7;
933 else if (dc->cc_size == 2)
934 bits = 15;
936 tcg_gen_shri_tl(cc, cc_result, bits);
937 tcg_gen_xori_tl(cc, cc, 1);
938 } else {
939 cris_evaluate_flags(dc);
940 tcg_gen_xori_tl(cc, cpu_PR[PR_CCS],
941 N_FLAG);
942 tcg_gen_andi_tl(cc, cc, N_FLAG);
944 break;
945 case CC_MI:
946 if (arith_opt || move_opt) {
947 int bits = 31;
949 if (dc->cc_size == 1)
950 bits = 7;
951 else if (dc->cc_size == 2)
952 bits = 15;
954 tcg_gen_shri_tl(cc, cc_result, bits);
955 tcg_gen_andi_tl(cc, cc, 1);
957 else {
958 cris_evaluate_flags(dc);
959 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS],
960 N_FLAG);
962 break;
963 case CC_LS:
964 cris_evaluate_flags(dc);
965 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS],
966 C_FLAG | Z_FLAG);
967 break;
968 case CC_HI:
969 cris_evaluate_flags(dc);
971 TCGv tmp;
973 tmp = tcg_temp_new();
974 tcg_gen_xori_tl(tmp, cpu_PR[PR_CCS],
975 C_FLAG | Z_FLAG);
976 /* Overlay the C flag on top of the Z. */
977 tcg_gen_shli_tl(cc, tmp, 2);
978 tcg_gen_and_tl(cc, tmp, cc);
979 tcg_gen_andi_tl(cc, cc, Z_FLAG);
981 tcg_temp_free(tmp);
983 break;
984 case CC_GE:
985 cris_evaluate_flags(dc);
986 /* Overlay the V flag on top of the N. */
987 tcg_gen_shli_tl(cc, cpu_PR[PR_CCS], 2);
988 tcg_gen_xor_tl(cc,
989 cpu_PR[PR_CCS], cc);
990 tcg_gen_andi_tl(cc, cc, N_FLAG);
991 tcg_gen_xori_tl(cc, cc, N_FLAG);
992 break;
993 case CC_LT:
994 cris_evaluate_flags(dc);
995 /* Overlay the V flag on top of the N. */
996 tcg_gen_shli_tl(cc, cpu_PR[PR_CCS], 2);
997 tcg_gen_xor_tl(cc,
998 cpu_PR[PR_CCS], cc);
999 tcg_gen_andi_tl(cc, cc, N_FLAG);
1000 break;
1001 case CC_GT:
1002 cris_evaluate_flags(dc);
1004 TCGv n, z;
1006 n = tcg_temp_new();
1007 z = tcg_temp_new();
1009 /* To avoid a shift we overlay everything on
1010 the V flag. */
1011 tcg_gen_shri_tl(n, cpu_PR[PR_CCS], 2);
1012 tcg_gen_shri_tl(z, cpu_PR[PR_CCS], 1);
1013 /* invert Z. */
1014 tcg_gen_xori_tl(z, z, 2);
1016 tcg_gen_xor_tl(n, n, cpu_PR[PR_CCS]);
1017 tcg_gen_xori_tl(n, n, 2);
1018 tcg_gen_and_tl(cc, z, n);
1019 tcg_gen_andi_tl(cc, cc, 2);
1021 tcg_temp_free(n);
1022 tcg_temp_free(z);
1024 break;
1025 case CC_LE:
1026 cris_evaluate_flags(dc);
1028 TCGv n, z;
1030 n = tcg_temp_new();
1031 z = tcg_temp_new();
1033 /* To avoid a shift we overlay everything on
1034 the V flag. */
1035 tcg_gen_shri_tl(n, cpu_PR[PR_CCS], 2);
1036 tcg_gen_shri_tl(z, cpu_PR[PR_CCS], 1);
1038 tcg_gen_xor_tl(n, n, cpu_PR[PR_CCS]);
1039 tcg_gen_or_tl(cc, z, n);
1040 tcg_gen_andi_tl(cc, cc, 2);
1042 tcg_temp_free(n);
1043 tcg_temp_free(z);
1045 break;
1046 case CC_P:
1047 cris_evaluate_flags(dc);
1048 tcg_gen_andi_tl(cc, cpu_PR[PR_CCS], P_FLAG);
1049 break;
1050 case CC_A:
1051 tcg_gen_movi_tl(cc, 1);
1052 break;
1053 default:
1054 BUG();
1055 break;
1059 static void cris_store_direct_jmp(DisasContext *dc)
1061 /* Store the direct jmp state into the cpu-state. */
1062 if (dc->jmp == JMP_DIRECT) {
1063 tcg_gen_movi_tl(env_btarget, dc->jmp_pc);
1064 tcg_gen_movi_tl(env_btaken, 1);
1068 static void cris_prepare_cc_branch (DisasContext *dc,
1069 int offset, int cond)
1071 /* This helps us re-schedule the micro-code to insns in delay-slots
1072 before the actual jump. */
1073 dc->delayed_branch = 2;
1074 dc->jmp_pc = dc->pc + offset;
1076 if (cond != CC_A)
1078 dc->jmp = JMP_INDIRECT;
1079 gen_tst_cc (dc, env_btaken, cond);
1080 tcg_gen_movi_tl(env_btarget, dc->jmp_pc);
1081 } else {
1082 /* Allow chaining. */
1083 dc->jmp = JMP_DIRECT;
1088 /* Prepare a jump, e.g. when the dest is in a live reg. Direct should be set
1089    when the dest addr is constant, to allow tb chaining.  */
1090 static inline void cris_prepare_jmp (DisasContext *dc, unsigned int type)
1092 /* This helps us re-schedule the micro-code to insns in delay-slots
1093 before the actual jump. */
1094 dc->delayed_branch = 2;
1095 dc->jmp = type;
1096 if (type == JMP_INDIRECT)
1097 tcg_gen_movi_tl(env_btaken, 1);
1100 static void gen_load64(DisasContext *dc, TCGv_i64 dst, TCGv addr)
1102 int mem_index = cpu_mmu_index(dc->env);
1104         /* If we get a fault in a delay slot we must keep the jmp state in
1105 the cpu-state to be able to re-execute the jmp. */
1106 if (dc->delayed_branch == 1)
1107 cris_store_direct_jmp(dc);
1109 tcg_gen_qemu_ld64(dst, addr, mem_index);
1112 static void gen_load(DisasContext *dc, TCGv dst, TCGv addr,
1113 unsigned int size, int sign)
1115 int mem_index = cpu_mmu_index(dc->env);
1117         /* If we get a fault in a delay slot we must keep the jmp state in
1118 the cpu-state to be able to re-execute the jmp. */
1119 if (dc->delayed_branch == 1)
1120 cris_store_direct_jmp(dc);
1122 if (size == 1) {
1123 if (sign)
1124 tcg_gen_qemu_ld8s(dst, addr, mem_index);
1125 else
1126 tcg_gen_qemu_ld8u(dst, addr, mem_index);
1128 else if (size == 2) {
1129 if (sign)
1130 tcg_gen_qemu_ld16s(dst, addr, mem_index);
1131 else
1132 tcg_gen_qemu_ld16u(dst, addr, mem_index);
1134 else if (size == 4) {
1135 tcg_gen_qemu_ld32u(dst, addr, mem_index);
1137 else {
1138 abort();
1142 static void gen_store (DisasContext *dc, TCGv addr, TCGv val,
1143 unsigned int size)
1145 int mem_index = cpu_mmu_index(dc->env);
1147         /* If we get a fault in a delay slot we must keep the jmp state in
1148 the cpu-state to be able to re-execute the jmp. */
1149 if (dc->delayed_branch == 1)
1150 cris_store_direct_jmp(dc);
1153         /* Conditional writes. We only support the kind where X and P are known
1154 at translation time. */
1155 if (dc->flagx_known && dc->flags_x && (dc->tb_flags & P_FLAG)) {
1156 dc->postinc = 0;
1157 cris_evaluate_flags(dc);
1158 tcg_gen_ori_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], C_FLAG);
1159 return;
1162 if (size == 1)
1163 tcg_gen_qemu_st8(val, addr, mem_index);
1164 else if (size == 2)
1165 tcg_gen_qemu_st16(val, addr, mem_index);
1166 else
1167 tcg_gen_qemu_st32(val, addr, mem_index);
1169 if (dc->flagx_known && dc->flags_x) {
1170 cris_evaluate_flags(dc);
1171 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~C_FLAG);
1175 static inline void t_gen_sext(TCGv d, TCGv s, int size)
1177 if (size == 1)
1178 tcg_gen_ext8s_i32(d, s);
1179 else if (size == 2)
1180 tcg_gen_ext16s_i32(d, s);
1181 else if(!TCGV_EQUAL(d, s))
1182 tcg_gen_mov_tl(d, s);
1185 static inline void t_gen_zext(TCGv d, TCGv s, int size)
1187 if (size == 1)
1188 tcg_gen_ext8u_i32(d, s);
1189 else if (size == 2)
1190 tcg_gen_ext16u_i32(d, s);
1191 else if (!TCGV_EQUAL(d, s))
1192 tcg_gen_mov_tl(d, s);
1195 #if DISAS_CRIS
1196 static char memsize_char(int size)
1198 switch (size)
1200 case 1: return 'b'; break;
1201 case 2: return 'w'; break;
1202 case 4: return 'd'; break;
1203 default:
1204 return 'x';
1205 break;
1208 #endif
1210 static inline unsigned int memsize_z(DisasContext *dc)
1212 return dc->zsize + 1;
1215 static inline unsigned int memsize_zz(DisasContext *dc)
1217 switch (dc->zzsize)
1219 case 0: return 1;
1220 case 1: return 2;
1221 default:
1222 return 4;
1226 static inline void do_postinc (DisasContext *dc, int size)
1228 if (dc->postinc)
1229 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], size);
1232 static inline void dec_prep_move_r(DisasContext *dc, int rs, int rd,
1233 int size, int s_ext, TCGv dst)
1235 if (s_ext)
1236 t_gen_sext(dst, cpu_R[rs], size);
1237 else
1238 t_gen_zext(dst, cpu_R[rs], size);
1241 /* Prepare dst and src for a register ALU operation.
1242    s_ext decides if operand1 should be sign-extended or zero-extended when
1243 needed. */
1244 static void dec_prep_alu_r(DisasContext *dc, int rs, int rd,
1245 int size, int s_ext, TCGv dst, TCGv src)
1247 dec_prep_move_r(dc, rs, rd, size, s_ext, src);
1249 if (s_ext)
1250 t_gen_sext(dst, cpu_R[rd], size);
1251 else
1252 t_gen_zext(dst, cpu_R[rd], size);
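/* Fetch a memory operand. When the source is [$pc+] (rs == 15 with
   post-increment), the operand is an immediate embedded in the insn stream;
   it is read at translation time with the ld*_code helpers and the returned
   insn length grows by the operand size (byte immediates still occupy two
   bytes). */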
1255 static int dec_prep_move_m(DisasContext *dc, int s_ext, int memsize,
1256 TCGv dst)
1258 unsigned int rs, rd;
1259 uint32_t imm;
1260 int is_imm;
1261 int insn_len = 2;
1263 rs = dc->op1;
1264 rd = dc->op2;
1265 is_imm = rs == 15 && dc->postinc;
1267         /* Load [$rs] into dst.  */
1268 if (is_imm) {
1269 insn_len = 2 + memsize;
1270 if (memsize == 1)
1271 insn_len++;
1273 if (memsize != 4) {
1274 if (s_ext) {
1275 if (memsize == 1)
1276 imm = ldsb_code(dc->pc + 2);
1277 else
1278 imm = ldsw_code(dc->pc + 2);
1279 } else {
1280 if (memsize == 1)
1281 imm = ldub_code(dc->pc + 2);
1282 else
1283 imm = lduw_code(dc->pc + 2);
1285 } else
1286 imm = ldl_code(dc->pc + 2);
1288 tcg_gen_movi_tl(dst, imm);
1289 dc->postinc = 0;
1290 } else {
1291 cris_flush_cc_state(dc);
1292 gen_load(dc, dst, cpu_R[rs], memsize, 0);
1293 if (s_ext)
1294 t_gen_sext(dst, dst, memsize);
1295 else
1296 t_gen_zext(dst, dst, memsize);
1298 return insn_len;
1301 /* Prepare dst and src for a memory + ALU operation.
1302    s_ext decides if operand1 should be sign-extended or zero-extended when
1303 needed. */
1304 static int dec_prep_alu_m(DisasContext *dc, int s_ext, int memsize,
1305 TCGv dst, TCGv src)
1307 int insn_len;
1309 insn_len = dec_prep_move_m(dc, s_ext, memsize, src);
1310 tcg_gen_mov_tl(dst, cpu_R[dc->op2]);
1311 return insn_len;
1314 #if DISAS_CRIS
1315 static const char *cc_name(int cc)
1317 static const char *cc_names[16] = {
1318 "cc", "cs", "ne", "eq", "vc", "vs", "pl", "mi",
1319 "ls", "hi", "ge", "lt", "gt", "le", "a", "p"
1321 assert(cc < 16);
1322 return cc_names[cc];
1324 #endif
1326 /* Start of insn decoders. */
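/* Each dec_* handler returns the length of the decoded insn in bytes:
   2 for the basic insn word, plus the size of any immediate fetched from the
   insn stream. */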
1328 static unsigned int dec_bccq(DisasContext *dc)
1330 int32_t offset;
1331 int sign;
1332 uint32_t cond = dc->op2;
1333 int tmp;
1335 offset = EXTRACT_FIELD (dc->ir, 1, 7);
1336 sign = EXTRACT_FIELD(dc->ir, 0, 0);
1338 offset *= 2;
1339 offset |= sign << 8;
1340 tmp = offset;
1341 offset = sign_extend(offset, 8);
1343 LOG_DIS("b%s %x\n", cc_name(cond), dc->pc + offset);
1345 /* op2 holds the condition-code. */
1346 cris_cc_mask(dc, 0);
1347 cris_prepare_cc_branch (dc, offset, cond);
1348 return 2;
1350 static unsigned int dec_addoq(DisasContext *dc)
1352 int32_t imm;
1354 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 7);
1355 imm = sign_extend(dc->op1, 7);
1357 LOG_DIS("addoq %d, $r%u\n", imm, dc->op2);
1358 cris_cc_mask(dc, 0);
1359         /* Fetch register operand.  */
1360 tcg_gen_addi_tl(cpu_R[R_ACR], cpu_R[dc->op2], imm);
1362 return 2;
1364 static unsigned int dec_addq(DisasContext *dc)
1366 LOG_DIS("addq %u, $r%u\n", dc->op1, dc->op2);
1368 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1370 cris_cc_mask(dc, CC_MASK_NZVC);
1372 cris_alu(dc, CC_OP_ADD,
1373 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
1374 return 2;
1376 static unsigned int dec_moveq(DisasContext *dc)
1378 uint32_t imm;
1380 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1381 imm = sign_extend(dc->op1, 5);
1382 LOG_DIS("moveq %d, $r%u\n", imm, dc->op2);
1384 tcg_gen_mov_tl(cpu_R[dc->op2], tcg_const_tl(imm));
1385 return 2;
1387 static unsigned int dec_subq(DisasContext *dc)
1389 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1391 LOG_DIS("subq %u, $r%u\n", dc->op1, dc->op2);
1393 cris_cc_mask(dc, CC_MASK_NZVC);
1394 cris_alu(dc, CC_OP_SUB,
1395 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(dc->op1), 4);
1396 return 2;
1398 static unsigned int dec_cmpq(DisasContext *dc)
1400 uint32_t imm;
1401 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1402 imm = sign_extend(dc->op1, 5);
1404 LOG_DIS("cmpq %d, $r%d\n", imm, dc->op2);
1405 cris_cc_mask(dc, CC_MASK_NZVC);
1407 cris_alu(dc, CC_OP_CMP,
1408 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
1409 return 2;
1411 static unsigned int dec_andq(DisasContext *dc)
1413 uint32_t imm;
1414 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1415 imm = sign_extend(dc->op1, 5);
1417 LOG_DIS("andq %d, $r%d\n", imm, dc->op2);
1418 cris_cc_mask(dc, CC_MASK_NZ);
1420 cris_alu(dc, CC_OP_AND,
1421 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
1422 return 2;
1424 static unsigned int dec_orq(DisasContext *dc)
1426 uint32_t imm;
1427 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 5);
1428 imm = sign_extend(dc->op1, 5);
1429 LOG_DIS("orq %d, $r%d\n", imm, dc->op2);
1430 cris_cc_mask(dc, CC_MASK_NZ);
1432 cris_alu(dc, CC_OP_OR,
1433 cpu_R[dc->op2], cpu_R[dc->op2], tcg_const_tl(imm), 4);
1434 return 2;
1436 static unsigned int dec_btstq(DisasContext *dc)
1438 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1439 LOG_DIS("btstq %u, $r%d\n", dc->op1, dc->op2);
1441 cris_cc_mask(dc, CC_MASK_NZ);
1442 cris_evaluate_flags(dc);
1443 gen_helper_btst(cpu_PR[PR_CCS], cpu_R[dc->op2],
1444 tcg_const_tl(dc->op1), cpu_PR[PR_CCS]);
1445 cris_alu(dc, CC_OP_MOVE,
1446 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op2], 4);
1447 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
1448 dc->flags_uptodate = 1;
1449 return 2;
1451 static unsigned int dec_asrq(DisasContext *dc)
1453 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1454 LOG_DIS("asrq %u, $r%d\n", dc->op1, dc->op2);
1455 cris_cc_mask(dc, CC_MASK_NZ);
1457 tcg_gen_sari_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
1458 cris_alu(dc, CC_OP_MOVE,
1459 cpu_R[dc->op2],
1460 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1461 return 2;
1463 static unsigned int dec_lslq(DisasContext *dc)
1465 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1466 LOG_DIS("lslq %u, $r%d\n", dc->op1, dc->op2);
1468 cris_cc_mask(dc, CC_MASK_NZ);
1470 tcg_gen_shli_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
1472 cris_alu(dc, CC_OP_MOVE,
1473 cpu_R[dc->op2],
1474 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1475 return 2;
1477 static unsigned int dec_lsrq(DisasContext *dc)
1479 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 4);
1480 LOG_DIS("lsrq %u, $r%d\n", dc->op1, dc->op2);
1482 cris_cc_mask(dc, CC_MASK_NZ);
1484 tcg_gen_shri_tl(cpu_R[dc->op2], cpu_R[dc->op2], dc->op1);
1485 cris_alu(dc, CC_OP_MOVE,
1486 cpu_R[dc->op2],
1487 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1488 return 2;
1491 static unsigned int dec_move_r(DisasContext *dc)
1493 int size = memsize_zz(dc);
1495 LOG_DIS("move.%c $r%u, $r%u\n",
1496 memsize_char(size), dc->op1, dc->op2);
1498 cris_cc_mask(dc, CC_MASK_NZ);
1499 if (size == 4) {
1500 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, cpu_R[dc->op2]);
1501 cris_cc_mask(dc, CC_MASK_NZ);
1502 cris_update_cc_op(dc, CC_OP_MOVE, 4);
1503 cris_update_cc_x(dc);
1504 cris_update_result(dc, cpu_R[dc->op2]);
1506 else {
1507 TCGv t0;
1509 t0 = tcg_temp_new();
1510 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
1511 cris_alu(dc, CC_OP_MOVE,
1512 cpu_R[dc->op2],
1513 cpu_R[dc->op2], t0, size);
1514 tcg_temp_free(t0);
1516 return 2;
1519 static unsigned int dec_scc_r(DisasContext *dc)
1521 int cond = dc->op2;
1523 LOG_DIS("s%s $r%u\n",
1524 cc_name(cond), dc->op1);
1526 if (cond != CC_A)
1528 int l1;
1530 gen_tst_cc (dc, cpu_R[dc->op1], cond);
1531 l1 = gen_new_label();
1532 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_R[dc->op1], 0, l1);
1533 tcg_gen_movi_tl(cpu_R[dc->op1], 1);
1534 gen_set_label(l1);
1536 else
1537 tcg_gen_movi_tl(cpu_R[dc->op1], 1);
1539 cris_cc_mask(dc, 0);
1540 return 2;
1543 static inline void cris_alu_alloc_temps(DisasContext *dc, int size, TCGv *t)
1545 if (size == 4) {
1546 t[0] = cpu_R[dc->op2];
1547 t[1] = cpu_R[dc->op1];
1548 } else {
1549 t[0] = tcg_temp_new();
1550 t[1] = tcg_temp_new();
1554 static inline void cris_alu_free_temps(DisasContext *dc, int size, TCGv *t)
1556 if (size != 4) {
1557 tcg_temp_free(t[0]);
1558 tcg_temp_free(t[1]);
1562 static unsigned int dec_and_r(DisasContext *dc)
1564 TCGv t[2];
1565 int size = memsize_zz(dc);
1567 LOG_DIS("and.%c $r%u, $r%u\n",
1568 memsize_char(size), dc->op1, dc->op2);
1570 cris_cc_mask(dc, CC_MASK_NZ);
1572 cris_alu_alloc_temps(dc, size, t);
1573 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1574 cris_alu(dc, CC_OP_AND, cpu_R[dc->op2], t[0], t[1], size);
1575 cris_alu_free_temps(dc, size, t);
1576 return 2;
1579 static unsigned int dec_lz_r(DisasContext *dc)
1581 TCGv t0;
1582 LOG_DIS("lz $r%u, $r%u\n",
1583 dc->op1, dc->op2);
1584 cris_cc_mask(dc, CC_MASK_NZ);
1585 t0 = tcg_temp_new();
1586 dec_prep_alu_r(dc, dc->op1, dc->op2, 4, 0, cpu_R[dc->op2], t0);
1587 cris_alu(dc, CC_OP_LZ, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1588 tcg_temp_free(t0);
1589 return 2;
1592 static unsigned int dec_lsl_r(DisasContext *dc)
1594 TCGv t[2];
1595 int size = memsize_zz(dc);
1597 LOG_DIS("lsl.%c $r%u, $r%u\n",
1598 memsize_char(size), dc->op1, dc->op2);
1600 cris_cc_mask(dc, CC_MASK_NZ);
1601 cris_alu_alloc_temps(dc, size, t);
1602 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1603 tcg_gen_andi_tl(t[1], t[1], 63);
1604 cris_alu(dc, CC_OP_LSL, cpu_R[dc->op2], t[0], t[1], size);
1605         cris_alu_free_temps(dc, size, t);
1606 return 2;
1609 static unsigned int dec_lsr_r(DisasContext *dc)
1611 TCGv t[2];
1612 int size = memsize_zz(dc);
1614 LOG_DIS("lsr.%c $r%u, $r%u\n",
1615 memsize_char(size), dc->op1, dc->op2);
1617 cris_cc_mask(dc, CC_MASK_NZ);
1618 cris_alu_alloc_temps(dc, size, t);
1619 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1620 tcg_gen_andi_tl(t[1], t[1], 63);
1621 cris_alu(dc, CC_OP_LSR, cpu_R[dc->op2], t[0], t[1], size);
1622 cris_alu_free_temps(dc, size, t);
1623 return 2;
1626 static unsigned int dec_asr_r(DisasContext *dc)
1628 TCGv t[2];
1629 int size = memsize_zz(dc);
1631 LOG_DIS("asr.%c $r%u, $r%u\n",
1632 memsize_char(size), dc->op1, dc->op2);
1634 cris_cc_mask(dc, CC_MASK_NZ);
1635 cris_alu_alloc_temps(dc, size, t);
1636 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 1, t[0], t[1]);
1637 tcg_gen_andi_tl(t[1], t[1], 63);
1638 cris_alu(dc, CC_OP_ASR, cpu_R[dc->op2], t[0], t[1], size);
1639 cris_alu_free_temps(dc, size, t);
1640 return 2;
1643 static unsigned int dec_muls_r(DisasContext *dc)
1645 TCGv t[2];
1646 int size = memsize_zz(dc);
1648 LOG_DIS("muls.%c $r%u, $r%u\n",
1649 memsize_char(size), dc->op1, dc->op2);
1650 cris_cc_mask(dc, CC_MASK_NZV);
1651 cris_alu_alloc_temps(dc, size, t);
1652 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 1, t[0], t[1]);
1654 cris_alu(dc, CC_OP_MULS, cpu_R[dc->op2], t[0], t[1], 4);
1655 cris_alu_free_temps(dc, size, t);
1656 return 2;
1659 static unsigned int dec_mulu_r(DisasContext *dc)
1661 TCGv t[2];
1662 int size = memsize_zz(dc);
1664 LOG_DIS("mulu.%c $r%u, $r%u\n",
1665 memsize_char(size), dc->op1, dc->op2);
1666 cris_cc_mask(dc, CC_MASK_NZV);
1667 cris_alu_alloc_temps(dc, size, t);
1668 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1670 cris_alu(dc, CC_OP_MULU, cpu_R[dc->op2], t[0], t[1], 4);
1671         cris_alu_free_temps(dc, size, t);
1672 return 2;
1676 static unsigned int dec_dstep_r(DisasContext *dc)
1678 LOG_DIS("dstep $r%u, $r%u\n", dc->op1, dc->op2);
1679 cris_cc_mask(dc, CC_MASK_NZ);
1680 cris_alu(dc, CC_OP_DSTEP,
1681 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op1], 4);
1682 return 2;
1685 static unsigned int dec_xor_r(DisasContext *dc)
1687 TCGv t[2];
1688 int size = memsize_zz(dc);
1689 LOG_DIS("xor.%c $r%u, $r%u\n",
1690 memsize_char(size), dc->op1, dc->op2);
1691 BUG_ON(size != 4); /* xor is dword. */
1692 cris_cc_mask(dc, CC_MASK_NZ);
1693 cris_alu_alloc_temps(dc, size, t);
1694 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1696 cris_alu(dc, CC_OP_XOR, cpu_R[dc->op2], t[0], t[1], 4);
1697 cris_alu_free_temps(dc, size, t);
1698 return 2;
1701 static unsigned int dec_bound_r(DisasContext *dc)
1703 TCGv l0;
1704 int size = memsize_zz(dc);
1705 LOG_DIS("bound.%c $r%u, $r%u\n",
1706 memsize_char(size), dc->op1, dc->op2);
1707 cris_cc_mask(dc, CC_MASK_NZ);
1708 l0 = tcg_temp_local_new();
1709 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, l0);
1710 cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], cpu_R[dc->op2], l0, 4);
1711 tcg_temp_free(l0);
1712 return 2;
1715 static unsigned int dec_cmp_r(DisasContext *dc)
1717 TCGv t[2];
1718 int size = memsize_zz(dc);
1719 LOG_DIS("cmp.%c $r%u, $r%u\n",
1720 memsize_char(size), dc->op1, dc->op2);
1721 cris_cc_mask(dc, CC_MASK_NZVC);
1722 cris_alu_alloc_temps(dc, size, t);
1723 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1725 cris_alu(dc, CC_OP_CMP, cpu_R[dc->op2], t[0], t[1], size);
1726 cris_alu_free_temps(dc, size, t);
1727 return 2;
1730 static unsigned int dec_abs_r(DisasContext *dc)
1732 TCGv t0;
1734 LOG_DIS("abs $r%u, $r%u\n",
1735 dc->op1, dc->op2);
1736 cris_cc_mask(dc, CC_MASK_NZ);
1738 t0 = tcg_temp_new();
1739 tcg_gen_sari_tl(t0, cpu_R[dc->op1], 31);
1740 tcg_gen_xor_tl(cpu_R[dc->op2], cpu_R[dc->op1], t0);
1741 tcg_gen_sub_tl(cpu_R[dc->op2], cpu_R[dc->op2], t0);
1742 tcg_temp_free(t0);
1744 cris_alu(dc, CC_OP_MOVE,
1745 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op2], 4);
1746 return 2;
1749 static unsigned int dec_add_r(DisasContext *dc)
1751 TCGv t[2];
1752 int size = memsize_zz(dc);
1753 LOG_DIS("add.%c $r%u, $r%u\n",
1754 memsize_char(size), dc->op1, dc->op2);
1755 cris_cc_mask(dc, CC_MASK_NZVC);
1756 cris_alu_alloc_temps(dc, size, t);
1757 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1759 cris_alu(dc, CC_OP_ADD, cpu_R[dc->op2], t[0], t[1], size);
1760 cris_alu_free_temps(dc, size, t);
1761 return 2;
1764 static unsigned int dec_addc_r(DisasContext *dc)
1766 LOG_DIS("addc $r%u, $r%u\n",
1767 dc->op1, dc->op2);
1768 cris_evaluate_flags(dc);
1769 /* Set for this insn. */
1770 dc->flagx_known = 1;
1771 dc->flags_x = X_FLAG;
1773 cris_cc_mask(dc, CC_MASK_NZVC);
1774 cris_alu(dc, CC_OP_ADDC,
1775 cpu_R[dc->op2], cpu_R[dc->op2], cpu_R[dc->op1], 4);
1776 return 2;
1779 static unsigned int dec_mcp_r(DisasContext *dc)
1781 LOG_DIS("mcp $p%u, $r%u\n",
1782 dc->op2, dc->op1);
1783 cris_evaluate_flags(dc);
1784 cris_cc_mask(dc, CC_MASK_RNZV);
1785 cris_alu(dc, CC_OP_MCP,
1786 cpu_R[dc->op1], cpu_R[dc->op1], cpu_PR[dc->op2], 4);
1787 return 2;
1790 #if DISAS_CRIS
1791 static char * swapmode_name(int mode, char *modename) {
1792 int i = 0;
1793 if (mode & 8)
1794 modename[i++] = 'n';
1795 if (mode & 4)
1796 modename[i++] = 'w';
1797 if (mode & 2)
1798 modename[i++] = 'b';
1799 if (mode & 1)
1800 modename[i++] = 'r';
1801 modename[i++] = 0;
1802 return modename;
1804 #endif
1806 static unsigned int dec_swap_r(DisasContext *dc)
1808 TCGv t0;
1809 #if DISAS_CRIS
1810         char modename[8]; /* Up to four mode chars plus NUL.  */
1811 #endif
1812 LOG_DIS("swap%s $r%u\n",
1813 swapmode_name(dc->op2, modename), dc->op1);
1815 cris_cc_mask(dc, CC_MASK_NZ);
1816 t0 = tcg_temp_new();
1817 t_gen_mov_TN_reg(t0, dc->op1);
1818 if (dc->op2 & 8)
1819 tcg_gen_not_tl(t0, t0);
1820 if (dc->op2 & 4)
1821 t_gen_swapw(t0, t0);
1822 if (dc->op2 & 2)
1823 t_gen_swapb(t0, t0);
1824 if (dc->op2 & 1)
1825 t_gen_swapr(t0, t0);
1826 cris_alu(dc, CC_OP_MOVE,
1827 cpu_R[dc->op1], cpu_R[dc->op1], t0, 4);
1828 tcg_temp_free(t0);
1829 return 2;
1832 static unsigned int dec_or_r(DisasContext *dc)
1834 TCGv t[2];
1835 int size = memsize_zz(dc);
1836 LOG_DIS("or.%c $r%u, $r%u\n",
1837 memsize_char(size), dc->op1, dc->op2);
1838 cris_cc_mask(dc, CC_MASK_NZ);
1839 cris_alu_alloc_temps(dc, size, t);
1840 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1841 cris_alu(dc, CC_OP_OR, cpu_R[dc->op2], t[0], t[1], size);
1842 cris_alu_free_temps(dc, size, t);
1843 return 2;
1846 static unsigned int dec_addi_r(DisasContext *dc)
1848 TCGv t0;
1849 LOG_DIS("addi.%c $r%u, $r%u\n",
1850 memsize_char(memsize_zz(dc)), dc->op2, dc->op1);
1851 cris_cc_mask(dc, 0);
1852 t0 = tcg_temp_new();
1853         tcg_gen_shli_tl(t0, cpu_R[dc->op2], dc->zzsize);
1854 tcg_gen_add_tl(cpu_R[dc->op1], cpu_R[dc->op1], t0);
1855 tcg_temp_free(t0);
1856 return 2;
1859 static unsigned int dec_addi_acr(DisasContext *dc)
1861 TCGv t0;
1862 LOG_DIS("addi.%c $r%u, $r%u, $acr\n",
1863 memsize_char(memsize_zz(dc)), dc->op2, dc->op1);
1864 cris_cc_mask(dc, 0);
1865 t0 = tcg_temp_new();
1866         tcg_gen_shli_tl(t0, cpu_R[dc->op2], dc->zzsize);
1867 tcg_gen_add_tl(cpu_R[R_ACR], cpu_R[dc->op1], t0);
1868 tcg_temp_free(t0);
1869 return 2;
1872 static unsigned int dec_neg_r(DisasContext *dc)
1874 TCGv t[2];
1875 int size = memsize_zz(dc);
1876 LOG_DIS("neg.%c $r%u, $r%u\n",
1877 memsize_char(size), dc->op1, dc->op2);
1878 cris_cc_mask(dc, CC_MASK_NZVC);
1879 cris_alu_alloc_temps(dc, size, t);
1880 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1882 cris_alu(dc, CC_OP_NEG, cpu_R[dc->op2], t[0], t[1], size);
1883 cris_alu_free_temps(dc, size, t);
1884 return 2;
1887 static unsigned int dec_btst_r(DisasContext *dc)
1889 LOG_DIS("btst $r%u, $r%u\n",
1890 dc->op1, dc->op2);
1891 cris_cc_mask(dc, CC_MASK_NZ);
1892 cris_evaluate_flags(dc);
1893 gen_helper_btst(cpu_PR[PR_CCS], cpu_R[dc->op2],
1894 cpu_R[dc->op1], cpu_PR[PR_CCS]);
1895 cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2],
1896 cpu_R[dc->op2], cpu_R[dc->op2], 4);
1897 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
1898 dc->flags_uptodate = 1;
1899 return 2;
1902 static unsigned int dec_sub_r(DisasContext *dc)
1904 TCGv t[2];
1905 int size = memsize_zz(dc);
1906 LOG_DIS("sub.%c $r%u, $r%u\n",
1907 memsize_char(size), dc->op1, dc->op2);
1908 cris_cc_mask(dc, CC_MASK_NZVC);
1909 cris_alu_alloc_temps(dc, size, t);
1910 dec_prep_alu_r(dc, dc->op1, dc->op2, size, 0, t[0], t[1]);
1911 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], t[0], t[1], size);
1912 cris_alu_free_temps(dc, size, t);
1913 return 2;
1916 /* Zero extension. From size to dword. */
1917 static unsigned int dec_movu_r(DisasContext *dc)
1919 TCGv t0;
1920 int size = memsize_z(dc);
1921 LOG_DIS("movu.%c $r%u, $r%u\n",
1922 memsize_char(size),
1923 dc->op1, dc->op2);
1925 cris_cc_mask(dc, CC_MASK_NZ);
1926 t0 = tcg_temp_new();
1927 dec_prep_move_r(dc, dc->op1, dc->op2, size, 0, t0);
1928 cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1929 tcg_temp_free(t0);
1930 return 2;
1933 /* Sign extension. From size to dword. */
1934 static unsigned int dec_movs_r(DisasContext *dc)
1936 TCGv t0;
1937 int size = memsize_z(dc);
1938 LOG_DIS("movs.%c $r%u, $r%u\n",
1939 memsize_char(size),
1940 dc->op1, dc->op2);
1942 cris_cc_mask(dc, CC_MASK_NZ);
1943 t0 = tcg_temp_new();
1944 /* Size can only be qi or hi. */
1945 t_gen_sext(t0, cpu_R[dc->op1], size);
1946 cris_alu(dc, CC_OP_MOVE,
1947 cpu_R[dc->op2], cpu_R[dc->op1], t0, 4);
1948 tcg_temp_free(t0);
1949 return 2;
1952 /* Zero extension. From size to dword.  */
1953 static unsigned int dec_addu_r(DisasContext *dc)
1955 TCGv t0;
1956 int size = memsize_z(dc);
1957 LOG_DIS("addu.%c $r%u, $r%u\n",
1958 memsize_char(size),
1959 dc->op1, dc->op2);
1961 cris_cc_mask(dc, CC_MASK_NZVC);
1962 t0 = tcg_temp_new();
1963 /* Size can only be qi or hi. */
1964 t_gen_zext(t0, cpu_R[dc->op1], size);
1965 cris_alu(dc, CC_OP_ADD,
1966 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1967 tcg_temp_free(t0);
1968 return 2;
1971 /* Sign extension. From size to dword. */
1972 static unsigned int dec_adds_r(DisasContext *dc)
1974 TCGv t0;
1975 int size = memsize_z(dc);
1976 LOG_DIS("adds.%c $r%u, $r%u\n",
1977 memsize_char(size),
1978 dc->op1, dc->op2);
1980 cris_cc_mask(dc, CC_MASK_NZVC);
1981 t0 = tcg_temp_new();
1982 /* Size can only be qi or hi. */
1983 t_gen_sext(t0, cpu_R[dc->op1], size);
1984 cris_alu(dc, CC_OP_ADD,
1985 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
1986 tcg_temp_free(t0);
1987 return 2;
1990 /* Zero extension. From size to dword. */
1991 static unsigned int dec_subu_r(DisasContext *dc)
1993 TCGv t0;
1994 int size = memsize_z(dc);
1995 LOG_DIS("subu.%c $r%u, $r%u\n",
1996 memsize_char(size),
1997 dc->op1, dc->op2);
1999 cris_cc_mask(dc, CC_MASK_NZVC);
2000 t0 = tcg_temp_new();
2001 /* Size can only be qi or hi. */
2002 t_gen_zext(t0, cpu_R[dc->op1], size);
2003 cris_alu(dc, CC_OP_SUB,
2004 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
2005 tcg_temp_free(t0);
2006 return 2;
2009 /* Sign extension. From size to dword. */
2010 static unsigned int dec_subs_r(DisasContext *dc)
2012 TCGv t0;
2013 int size = memsize_z(dc);
2014 LOG_DIS("subs.%c $r%u, $r%u\n",
2015 memsize_char(size),
2016 dc->op1, dc->op2);
2018 cris_cc_mask(dc, CC_MASK_NZVC);
2019 t0 = tcg_temp_new();
2020 /* Size can only be qi or hi. */
2021 t_gen_sext(t0, cpu_R[dc->op1], size);
2022 cris_alu(dc, CC_OP_SUB,
2023 cpu_R[dc->op2], cpu_R[dc->op2], t0, 4);
2024 tcg_temp_free(t0);
2025 return 2;
2028 static unsigned int dec_setclrf(DisasContext *dc)
2030 uint32_t flags;
2031 int set = (~dc->opcode >> 2) & 1;
2034 flags = (EXTRACT_FIELD(dc->ir, 12, 15) << 4)
2035 | EXTRACT_FIELD(dc->ir, 0, 3);
2036 if (set && flags == 0) {
2037 LOG_DIS("nop\n");
2038 return 2;
2039 } else if (!set && (flags & 0x20)) {
2040 LOG_DIS("di\n");
2042 else {
2043 LOG_DIS("%sf %x\n",
2044 set ? "set" : "clr",
2045 flags);
2048 /* User space is not allowed to touch these. Silently ignore. */
2049 if (dc->tb_flags & U_FLAG) {
2050 flags &= ~(S_FLAG | I_FLAG | U_FLAG);
2053 if (flags & X_FLAG) {
2054 dc->flagx_known = 1;
2055 if (set)
2056 dc->flags_x = X_FLAG;
2057 else
2058 dc->flags_x = 0;
2061 /* Break the TB if the P flag changes. */
2062 if (flags & P_FLAG) {
2063 if ((set && !(dc->tb_flags & P_FLAG))
2064 || (!set && (dc->tb_flags & P_FLAG))) {
2065 tcg_gen_movi_tl(env_pc, dc->pc + 2);
2066 dc->is_jmp = DISAS_UPDATE;
2067 dc->cpustate_changed = 1;
2070 if (flags & S_FLAG) {
2071 dc->cpustate_changed = 1;
2075 /* Simply decode the flags. */
2076 cris_evaluate_flags (dc);
2077 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
2078 cris_update_cc_x(dc);
2079 tcg_gen_movi_tl(cc_op, dc->cc_op);
2081 if (set) {
2082 if (!(dc->tb_flags & U_FLAG) && (flags & U_FLAG)) {
2083 /* Enter user mode. */
2084 t_gen_mov_env_TN(ksp, cpu_R[R_SP]);
2085 tcg_gen_mov_tl(cpu_R[R_SP], cpu_PR[PR_USP]);
2086 dc->cpustate_changed = 1;
2088 tcg_gen_ori_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], flags);
2090 else
2091 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~flags);
2093 dc->flags_uptodate = 1;
2094 dc->clear_x = 0;
2095 return 2;
2098 static unsigned int dec_move_rs(DisasContext *dc)
2100 LOG_DIS("move $r%u, $s%u\n", dc->op1, dc->op2);
2101 cris_cc_mask(dc, 0);
2102 gen_helper_movl_sreg_reg(tcg_const_tl(dc->op2), tcg_const_tl(dc->op1));
2103 return 2;
2105 static unsigned int dec_move_sr(DisasContext *dc)
2107 LOG_DIS("move $s%u, $r%u\n", dc->op2, dc->op1);
2108 cris_cc_mask(dc, 0);
2109 gen_helper_movl_reg_sreg(tcg_const_tl(dc->op1), tcg_const_tl(dc->op2));
2110 return 2;
2113 static unsigned int dec_move_rp(DisasContext *dc)
2115 TCGv t[2];
2116 LOG_DIS("move $r%u, $p%u\n", dc->op1, dc->op2);
2117 cris_cc_mask(dc, 0);
2119 t[0] = tcg_temp_new();
2120 if (dc->op2 == PR_CCS) {
2121 cris_evaluate_flags(dc);
2122 t_gen_mov_TN_reg(t[0], dc->op1);
2123 if (dc->tb_flags & U_FLAG) {
2124 t[1] = tcg_temp_new();
2125 /* User space is not allowed to touch all flags. */
2126 tcg_gen_andi_tl(t[0], t[0], 0x39f);
2127 tcg_gen_andi_tl(t[1], cpu_PR[PR_CCS], ~0x39f);
2128 tcg_gen_or_tl(t[0], t[1], t[0]);
2129 tcg_temp_free(t[1]);
2132 else
2133 t_gen_mov_TN_reg(t[0], dc->op1);
2135 t_gen_mov_preg_TN(dc, dc->op2, t[0]);
2136 if (dc->op2 == PR_CCS) {
2137 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
2138 dc->flags_uptodate = 1;
2140 tcg_temp_free(t[0]);
2141 return 2;
2143 static unsigned int dec_move_pr(DisasContext *dc)
2145 TCGv t0;
2146 LOG_DIS("move $p%u, $r%u\n", dc->op1, dc->op2);
2147 cris_cc_mask(dc, 0);
2149 if (dc->op2 == PR_CCS)
2150 cris_evaluate_flags(dc);
2152 t0 = tcg_temp_new();
2153 t_gen_mov_TN_preg(t0, dc->op2);
2154 cris_alu(dc, CC_OP_MOVE,
2155 cpu_R[dc->op1], cpu_R[dc->op1], t0, preg_sizes[dc->op2]);
2156 tcg_temp_free(t0);
2157 return 2;
2160 static unsigned int dec_move_mr(DisasContext *dc)
2162 int memsize = memsize_zz(dc);
2163 int insn_len;
2164 LOG_DIS("move.%c [$r%u%s, $r%u\n",
2165 memsize_char(memsize),
2166 dc->op1, dc->postinc ? "+]" : "]",
2167 dc->op2);
2169 if (memsize == 4) {
2170 insn_len = dec_prep_move_m(dc, 0, 4, cpu_R[dc->op2]);
2171 cris_cc_mask(dc, CC_MASK_NZ);
2172 cris_update_cc_op(dc, CC_OP_MOVE, 4);
2173 cris_update_cc_x(dc);
2174 cris_update_result(dc, cpu_R[dc->op2]);
2176 else {
2177 TCGv t0;
2179 t0 = tcg_temp_new();
2180 insn_len = dec_prep_move_m(dc, 0, memsize, t0);
2181 cris_cc_mask(dc, CC_MASK_NZ);
2182 cris_alu(dc, CC_OP_MOVE,
2183 cpu_R[dc->op2], cpu_R[dc->op2], t0, memsize);
2184 tcg_temp_free(t0);
2186 do_postinc(dc, memsize);
2187 return insn_len;
2190 static inline void cris_alu_m_alloc_temps(TCGv *t)
2192 t[0] = tcg_temp_new();
2193 t[1] = tcg_temp_new();
2196 static inline void cris_alu_m_free_temps(TCGv *t)
2198 tcg_temp_free(t[0]);
2199 tcg_temp_free(t[1]);
2202 static unsigned int dec_movs_m(DisasContext *dc)
2204 TCGv t[2];
2205 int memsize = memsize_z(dc);
2206 int insn_len;
2207 LOG_DIS("movs.%c [$r%u%s, $r%u\n",
2208 memsize_char(memsize),
2209 dc->op1, dc->postinc ? "+]" : "]",
2210 dc->op2);
2212 cris_alu_m_alloc_temps(t);
2213 /* sign extend. */
2214 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2215 cris_cc_mask(dc, CC_MASK_NZ);
2216 cris_alu(dc, CC_OP_MOVE,
2217 cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2218 do_postinc(dc, memsize);
2219 cris_alu_m_free_temps(t);
2220 return insn_len;
2223 static unsigned int dec_addu_m(DisasContext *dc)
2225 TCGv t[2];
2226 int memsize = memsize_z(dc);
2227 int insn_len;
2228 LOG_DIS("addu.%c [$r%u%s, $r%u\n",
2229 memsize_char(memsize),
2230 dc->op1, dc->postinc ? "+]" : "]",
2231 dc->op2);
2233 cris_alu_m_alloc_temps(t);
2234         /* zero extend.  */
2235 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2236 cris_cc_mask(dc, CC_MASK_NZVC);
2237 cris_alu(dc, CC_OP_ADD,
2238 cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2239 do_postinc(dc, memsize);
2240 cris_alu_m_free_temps(t);
2241 return insn_len;
2244 static unsigned int dec_adds_m(DisasContext *dc)
2246 TCGv t[2];
2247 int memsize = memsize_z(dc);
2248 int insn_len;
2249 LOG_DIS("adds.%c [$r%u%s, $r%u\n",
2250 memsize_char(memsize),
2251 dc->op1, dc->postinc ? "+]" : "]",
2252 dc->op2);
2254 cris_alu_m_alloc_temps(t);
2255 /* sign extend. */
2256 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2257 cris_cc_mask(dc, CC_MASK_NZVC);
2258 cris_alu(dc, CC_OP_ADD, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2259 do_postinc(dc, memsize);
2260 cris_alu_m_free_temps(t);
2261 return insn_len;
2264 static unsigned int dec_subu_m(DisasContext *dc)
2266 TCGv t[2];
2267 int memsize = memsize_z(dc);
2268 int insn_len;
2269 LOG_DIS("subu.%c [$r%u%s, $r%u\n",
2270 memsize_char(memsize),
2271 dc->op1, dc->postinc ? "+]" : "]",
2272 dc->op2);
2274 cris_alu_m_alloc_temps(t);
2275         /* zero extend.  */
2276 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2277 cris_cc_mask(dc, CC_MASK_NZVC);
2278 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2279 do_postinc(dc, memsize);
2280 cris_alu_m_free_temps(t);
2281 return insn_len;
2284 static unsigned int dec_subs_m(DisasContext *dc)
2286 TCGv t[2];
2287 int memsize = memsize_z(dc);
2288 int insn_len;
2289 LOG_DIS("subs.%c [$r%u%s, $r%u\n",
2290 memsize_char(memsize),
2291 dc->op1, dc->postinc ? "+]" : "]",
2292 dc->op2);
2294 cris_alu_m_alloc_temps(t);
2295 /* sign extend. */
2296 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2297 cris_cc_mask(dc, CC_MASK_NZVC);
2298 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2299 do_postinc(dc, memsize);
2300 cris_alu_m_free_temps(t);
2301 return insn_len;
2304 static unsigned int dec_movu_m(DisasContext *dc)
2306 TCGv t[2];
2307 int memsize = memsize_z(dc);
2308 int insn_len;
2310 LOG_DIS("movu.%c [$r%u%s, $r%u\n",
2311 memsize_char(memsize),
2312 dc->op1, dc->postinc ? "+]" : "]",
2313 dc->op2);
2315 cris_alu_m_alloc_temps(t);
2316 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2317 cris_cc_mask(dc, CC_MASK_NZ);
2318 cris_alu(dc, CC_OP_MOVE, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2319 do_postinc(dc, memsize);
2320 cris_alu_m_free_temps(t);
2321 return insn_len;
2324 static unsigned int dec_cmpu_m(DisasContext *dc)
2326 TCGv t[2];
2327 int memsize = memsize_z(dc);
2328 int insn_len;
2329 LOG_DIS("cmpu.%c [$r%u%s, $r%u\n",
2330 memsize_char(memsize),
2331 dc->op1, dc->postinc ? "+]" : "]",
2332 dc->op2);
2334 cris_alu_m_alloc_temps(t);
2335 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2336 cris_cc_mask(dc, CC_MASK_NZVC);
2337 cris_alu(dc, CC_OP_CMP, cpu_R[dc->op2], cpu_R[dc->op2], t[1], 4);
2338 do_postinc(dc, memsize);
2339 cris_alu_m_free_temps(t);
2340 return insn_len;
2343 static unsigned int dec_cmps_m(DisasContext *dc)
2345 TCGv t[2];
2346 int memsize = memsize_z(dc);
2347 int insn_len;
2348 LOG_DIS("cmps.%c [$r%u%s, $r%u\n",
2349 memsize_char(memsize),
2350 dc->op1, dc->postinc ? "+]" : "]",
2351 dc->op2);
2353 cris_alu_m_alloc_temps(t);
2354 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2355 cris_cc_mask(dc, CC_MASK_NZVC);
2356 cris_alu(dc, CC_OP_CMP,
2357 cpu_R[dc->op2], cpu_R[dc->op2], t[1],
2358 memsize_zz(dc));
2359 do_postinc(dc, memsize);
2360 cris_alu_m_free_temps(t);
2361 return insn_len;
2364 static unsigned int dec_cmp_m(DisasContext *dc)
2366 TCGv t[2];
2367 int memsize = memsize_zz(dc);
2368 int insn_len;
2369 LOG_DIS("cmp.%c [$r%u%s, $r%u\n",
2370 memsize_char(memsize),
2371 dc->op1, dc->postinc ? "+]" : "]",
2372 dc->op2);
2374 cris_alu_m_alloc_temps(t);
2375 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2376 cris_cc_mask(dc, CC_MASK_NZVC);
2377 cris_alu(dc, CC_OP_CMP,
2378 cpu_R[dc->op2], cpu_R[dc->op2], t[1],
2379 memsize_zz(dc));
2380 do_postinc(dc, memsize);
2381 cris_alu_m_free_temps(t);
2382 return insn_len;
2385 static unsigned int dec_test_m(DisasContext *dc)
2387 TCGv t[2];
2388 int memsize = memsize_zz(dc);
2389 int insn_len;
2390 LOG_DIS("test.%c [$r%u%s op2=%x\n",
2391 memsize_char(memsize),
2392 dc->op1, dc->postinc ? "+]" : "]",
2393 dc->op2);
2395 cris_evaluate_flags(dc);
2397 cris_alu_m_alloc_temps(t);
2398 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2399 cris_cc_mask(dc, CC_MASK_NZ);
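/* Clear the low two CCS bits (C and V); test only produces N and Z. */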
2400 tcg_gen_andi_tl(cpu_PR[PR_CCS], cpu_PR[PR_CCS], ~3);
2402 cris_alu(dc, CC_OP_CMP,
2403 cpu_R[dc->op2], t[1], tcg_const_tl(0), memsize_zz(dc));
2404 do_postinc(dc, memsize);
2405 cris_alu_m_free_temps(t);
2406 return insn_len;
2409 static unsigned int dec_and_m(DisasContext *dc)
2411 TCGv t[2];
2412 int memsize = memsize_zz(dc);
2413 int insn_len;
2414 LOG_DIS("and.%c [$r%u%s, $r%u\n",
2415 memsize_char(memsize),
2416 dc->op1, dc->postinc ? "+]" : "]",
2417 dc->op2);
2419 cris_alu_m_alloc_temps(t);
2420 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2421 cris_cc_mask(dc, CC_MASK_NZ);
2422 cris_alu(dc, CC_OP_AND, cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2423 do_postinc(dc, memsize);
2424 cris_alu_m_free_temps(t);
2425 return insn_len;
2428 static unsigned int dec_add_m(DisasContext *dc)
2430 TCGv t[2];
2431 int memsize = memsize_zz(dc);
2432 int insn_len;
2433 LOG_DIS("add.%c [$r%u%s, $r%u\n",
2434 memsize_char(memsize),
2435 dc->op1, dc->postinc ? "+]" : "]",
2436 dc->op2);
2438 cris_alu_m_alloc_temps(t);
2439 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2440 cris_cc_mask(dc, CC_MASK_NZVC);
2441 cris_alu(dc, CC_OP_ADD,
2442 cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2443 do_postinc(dc, memsize);
2444 cris_alu_m_free_temps(t);
2445 return insn_len;
2448 static unsigned int dec_addo_m(DisasContext *dc)
2450 TCGv t[2];
2451 int memsize = memsize_zz(dc);
2452 int insn_len;
2453 LOG_DIS("addo.%c [$r%u%s, $r%u\n",
2454 memsize_char(memsize),
2455 dc->op1, dc->postinc ? "+]" : "]",
2456 dc->op2);
2458 cris_alu_m_alloc_temps(t);
2459 insn_len = dec_prep_alu_m(dc, 1, memsize, t[0], t[1]);
2460 cris_cc_mask(dc, 0);
2461 cris_alu(dc, CC_OP_ADD, cpu_R[R_ACR], t[0], t[1], 4);
2462 do_postinc(dc, memsize);
2463 cris_alu_m_free_temps(t);
2464 return insn_len;
2467 static unsigned int dec_bound_m(DisasContext *dc)
2469 TCGv l[2];
2470 int memsize = memsize_zz(dc);
2471 int insn_len;
2472 LOG_DIS("bound.%c [$r%u%s, $r%u\n",
2473 memsize_char(memsize),
2474 dc->op1, dc->postinc ? "+]" : "]",
2475 dc->op2);
2477 l[0] = tcg_temp_local_new();
2478 l[1] = tcg_temp_local_new();
2479 insn_len = dec_prep_alu_m(dc, 0, memsize, l[0], l[1]);
2480 cris_cc_mask(dc, CC_MASK_NZ);
2481 cris_alu(dc, CC_OP_BOUND, cpu_R[dc->op2], l[0], l[1], 4);
2482 do_postinc(dc, memsize);
2483 tcg_temp_free(l[0]);
2484 tcg_temp_free(l[1]);
2485 return insn_len;
2488 static unsigned int dec_addc_mr(DisasContext *dc)
2490 TCGv t[2];
2491 int insn_len = 2;
2492 LOG_DIS("addc [$r%u%s, $r%u\n",
2493 dc->op1, dc->postinc ? "+]" : "]",
2494 dc->op2);
2496 cris_evaluate_flags(dc);
2498 /* The X flag is known to be set for this insn. */
2499 dc->flagx_known = 1;
2500 dc->flags_x = X_FLAG;
2502 cris_alu_m_alloc_temps(t);
2503 insn_len = dec_prep_alu_m(dc, 0, 4, t[0], t[1]);
2504 cris_cc_mask(dc, CC_MASK_NZVC);
2505 cris_alu(dc, CC_OP_ADDC, cpu_R[dc->op2], t[0], t[1], 4);
2506 do_postinc(dc, 4);
2507 cris_alu_m_free_temps(t);
2508 return insn_len;
2511 static unsigned int dec_sub_m(DisasContext *dc)
2513 TCGv t[2];
2514 int memsize = memsize_zz(dc);
2515 int insn_len;
2516 LOG_DIS("sub.%c [$r%u%s, $r%u ir=%x zz=%x\n",
2517 memsize_char(memsize),
2518 dc->op1, dc->postinc ? "+]" : "]",
2519 dc->op2, dc->ir, dc->zzsize);
2521 cris_alu_m_alloc_temps(t);
2522 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2523 cris_cc_mask(dc, CC_MASK_NZVC);
2524 cris_alu(dc, CC_OP_SUB, cpu_R[dc->op2], t[0], t[1], memsize);
2525 do_postinc(dc, memsize);
2526 cris_alu_m_free_temps(t);
2527 return insn_len;
2530 static unsigned int dec_or_m(DisasContext *dc)
2532 TCGv t[2];
2533 int memsize = memsize_zz(dc);
2534 int insn_len;
2535 LOG_DIS("or.%c [$r%u%s, $r%u pc=%x\n",
2536 memsize_char(memsize),
2537 dc->op1, dc->postinc ? "+]" : "]",
2538 dc->op2, dc->pc);
2540 cris_alu_m_alloc_temps(t);
2541 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2542 cris_cc_mask(dc, CC_MASK_NZ);
2543 cris_alu(dc, CC_OP_OR,
2544 cpu_R[dc->op2], t[0], t[1], memsize_zz(dc));
2545 do_postinc(dc, memsize);
2546 cris_alu_m_free_temps(t);
2547 return insn_len;
2550 static unsigned int dec_move_mp(DisasContext *dc)
2552 TCGv t[2];
2553 int memsize = memsize_zz(dc);
2554 int insn_len = 2;
2556 LOG_DIS("move.%c [$r%u%s, $p%u\n",
2557 memsize_char(memsize),
2558 dc->op1,
2559 dc->postinc ? "+]" : "]",
2560 dc->op2);
2562 cris_alu_m_alloc_temps(t);
2563 insn_len = dec_prep_alu_m(dc, 0, memsize, t[0], t[1]);
2564 cris_cc_mask(dc, 0);
2565 if (dc->op2 == PR_CCS) {
2566 cris_evaluate_flags(dc);
2567 if (dc->tb_flags & U_FLAG) {
2568 /* User space is not allowed to touch all flags. */
2569 tcg_gen_andi_tl(t[1], t[1], 0x39f);
2570 tcg_gen_andi_tl(t[0], cpu_PR[PR_CCS], ~0x39f);
2571 tcg_gen_or_tl(t[1], t[0], t[1]);
2575 t_gen_mov_preg_TN(dc, dc->op2, t[1]);
2577 do_postinc(dc, memsize);
2578 cris_alu_m_free_temps(t);
2579 return insn_len;
2582 static unsigned int dec_move_pm(DisasContext *dc)
2584 TCGv t0;
2585 int memsize;
2587 memsize = preg_sizes[dc->op2];
2589 LOG_DIS("move.%c $p%u, [$r%u%s\n",
2590 memsize_char(memsize),
2591 dc->op2, dc->op1, dc->postinc ? "+]" : "]");
2593 /* Prepare the store: address in $r[op1], value (the special register) in a temp. */
2594 if (dc->op2 == PR_CCS)
2595 cris_evaluate_flags(dc);
2596 t0 = tcg_temp_new();
2597 t_gen_mov_TN_preg(t0, dc->op2);
2598 cris_flush_cc_state(dc);
2599 gen_store(dc, cpu_R[dc->op1], t0, memsize);
2600 tcg_temp_free(t0);
2602 cris_cc_mask(dc, 0);
2603 if (dc->postinc)
2604 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], memsize);
2605 return 2;
2608 static unsigned int dec_movem_mr(DisasContext *dc)
2610 TCGv_i64 tmp[16];
2611 TCGv tmp32;
2612 TCGv addr;
2613 int i;
2614 int nr = dc->op2 + 1;
2616 LOG_DIS("movem [$r%u%s, $r%u\n", dc->op1,
2617 dc->postinc ? "+]" : "]", dc->op2);
2619 addr = tcg_temp_new();
2620 /* There are probably better ways of doing this. */
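/*
 * Load the registers pairwise with 64-bit loads into tmp[], then split
 * each 64-bit value into two 32-bit registers below. An odd register
 * count is finished off with a single 32-bit load into tmp32.
 */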
2621 cris_flush_cc_state(dc);
2622 for (i = 0; i < (nr >> 1); i++) {
2623 tmp[i] = tcg_temp_new_i64();
2624 tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
2625 gen_load64(dc, tmp[i], addr);
2627 if (nr & 1) {
2628 tmp32 = tcg_temp_new_i32();
2629 tcg_gen_addi_tl(addr, cpu_R[dc->op1], i * 8);
2630 gen_load(dc, tmp32, addr, 4, 0);
2631 } else
2632 TCGV_UNUSED(tmp32);
2633 tcg_temp_free(addr);
2635 for (i = 0; i < (nr >> 1); i++) {
2636 tcg_gen_trunc_i64_i32(cpu_R[i * 2], tmp[i]);
2637 tcg_gen_shri_i64(tmp[i], tmp[i], 32);
2638 tcg_gen_trunc_i64_i32(cpu_R[i * 2 + 1], tmp[i]);
2639 tcg_temp_free_i64(tmp[i]);
2641 if (nr & 1) {
2642 tcg_gen_mov_tl(cpu_R[dc->op2], tmp32);
2643 tcg_temp_free(tmp32);
2646 /* Write back the updated pointer value. */
2647 if (dc->postinc)
2648 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], nr * 4);
2650 /* gen_load might want to evaluate the previous insn's flags. */
2651 cris_cc_mask(dc, 0);
2652 return 2;
2655 static unsigned int dec_movem_rm(DisasContext *dc)
2657 TCGv tmp;
2658 TCGv addr;
2659 int i;
2661 LOG_DIS("movem $r%u, [$r%u%s\n", dc->op2, dc->op1,
2662 dc->postinc ? "+]" : "]");
2664 cris_flush_cc_state(dc);
2666 tmp = tcg_temp_new();
2667 addr = tcg_temp_new();
2668 tcg_gen_movi_tl(tmp, 4);
2669 tcg_gen_mov_tl(addr, cpu_R[dc->op1]);
2670 for (i = 0; i <= dc->op2; i++) {
2671 /* Perform the store, then displace addr by the word size. */
2673 gen_store(dc, addr, cpu_R[i], 4);
2674 tcg_gen_add_tl(addr, addr, tmp);
2676 if (dc->postinc)
2677 tcg_gen_mov_tl(cpu_R[dc->op1], addr);
2678 cris_cc_mask(dc, 0);
2679 tcg_temp_free(tmp);
2680 tcg_temp_free(addr);
2681 return 2;
2684 static unsigned int dec_move_rm(DisasContext *dc)
2686 int memsize;
2688 memsize = memsize_zz(dc);
2690 LOG_DIS("move.%c $r%u, [$r%u]\n",
2691 memsize_char(memsize), dc->op2, dc->op1);
2693 /* prepare store. */
2694 cris_flush_cc_state(dc);
2695 gen_store(dc, cpu_R[dc->op1], cpu_R[dc->op2], memsize);
2697 if (dc->postinc)
2698 tcg_gen_addi_tl(cpu_R[dc->op1], cpu_R[dc->op1], memsize);
2699 cris_cc_mask(dc, 0);
2700 return 2;
2703 static unsigned int dec_lapcq(DisasContext *dc)
2705 LOG_DIS("lapcq %x, $r%u\n",
2706 dc->pc + dc->op1*2, dc->op2);
2707 cris_cc_mask(dc, 0);
2708 tcg_gen_movi_tl(cpu_R[dc->op2], dc->pc + dc->op1 * 2);
2709 return 2;
2712 static unsigned int dec_lapc_im(DisasContext *dc)
2714 unsigned int rd;
2715 int32_t imm;
2716 int32_t pc;
2718 rd = dc->op2;
2720 cris_cc_mask(dc, 0);
2721 imm = ldl_code(dc->pc + 2);
2722 LOG_DIS("lapc 0x%x, $r%u\n", imm + dc->pc, dc->op2);
2724 pc = dc->pc;
2725 pc += imm;
2726 t_gen_mov_reg_TN(rd, tcg_const_tl(pc));
2727 return 6;
2730 /* Jump to special reg. */
2731 static unsigned int dec_jump_p(DisasContext *dc)
2733 LOG_DIS("jump $p%u\n", dc->op2);
2735 if (dc->op2 == PR_CCS)
2736 cris_evaluate_flags(dc);
2737 t_gen_mov_TN_preg(env_btarget, dc->op2);
2738 /* rete will often have low bit set to indicate delayslot. */
2739 tcg_gen_andi_tl(env_btarget, env_btarget, ~1);
2740 cris_cc_mask(dc, 0);
2741 cris_prepare_jmp(dc, JMP_INDIRECT);
2742 return 2;
2745 /* Jump and save. */
2746 static unsigned int dec_jas_r(DisasContext *dc)
2748 LOG_DIS("jas $r%u, $p%u\n", dc->op1, dc->op2);
2749 cris_cc_mask(dc, 0);
2750 /* Store the return address in Pd. */
2751 tcg_gen_mov_tl(env_btarget, cpu_R[dc->op1]);
2752 if (dc->op2 > 15)
2753 abort();
2754 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 4));
2756 cris_prepare_jmp(dc, JMP_INDIRECT);
2757 return 2;
2760 static unsigned int dec_jas_im(DisasContext *dc)
2762 uint32_t imm;
2764 imm = ldl_code(dc->pc + 2);
2766 LOG_DIS("jas 0x%x\n", imm);
2767 cris_cc_mask(dc, 0);
2768 /* Store the return address in Pd. */
2769 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8));
2771 dc->jmp_pc = imm;
2772 cris_prepare_jmp(dc, JMP_DIRECT);
2773 return 6;
2776 static unsigned int dec_jasc_im(DisasContext *dc)
2778 uint32_t imm;
2780 imm = ldl_code(dc->pc + 2);
2782 LOG_DIS("jasc 0x%x\n", imm);
2783 cris_cc_mask(dc, 0);
2784 /* Store the return address in Pd. */
2785 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8 + 4));
2787 dc->jmp_pc = imm;
2788 cris_prepare_jmp(dc, JMP_DIRECT);
2789 return 6;
2792 static unsigned int dec_jasc_r(DisasContext *dc)
2794 LOG_DIS("jasc_r $r%u, $p%u\n", dc->op1, dc->op2);
2795 cris_cc_mask(dc, 0);
2796 /* Store the return address in Pd. */
2797 tcg_gen_mov_tl(env_btarget, cpu_R[dc->op1]);
2798 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 4 + 4));
2799 cris_prepare_jmp(dc, JMP_INDIRECT);
2800 return 2;
2803 static unsigned int dec_bcc_im(DisasContext *dc)
2805 int32_t offset;
2806 uint32_t cond = dc->op2;
2808 offset = ldsw_code(dc->pc + 2);
2810 LOG_DIS("b%s %d pc=%x dst=%x\n",
2811 cc_name(cond), offset,
2812 dc->pc, dc->pc + offset);
2814 cris_cc_mask(dc, 0);
2815 /* op2 holds the condition-code. */
2816 cris_prepare_cc_branch (dc, offset, cond);
2817 return 4;
2820 static unsigned int dec_bas_im(DisasContext *dc)
2822 int32_t simm;
2825 simm = ldl_code(dc->pc + 2);
2827 LOG_DIS("bas 0x%x, $p%u\n", dc->pc + simm, dc->op2);
2828 cris_cc_mask(dc, 0);
2829 /* Store the return address in Pd. */
2830 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 8));
2832 dc->jmp_pc = dc->pc + simm;
2833 cris_prepare_jmp(dc, JMP_DIRECT);
2834 return 6;
2837 static unsigned int dec_basc_im(DisasContext *dc)
2839 int32_t simm;
2840 simm = ldl_code(dc->pc + 2);
2842 LOG_DIS("basc 0x%x, $p%u\n", dc->pc + simm, dc->op2);
2843 cris_cc_mask(dc, 0);
2844 /* Store the return address in Pd. */
2845 t_gen_mov_preg_TN(dc, dc->op2, tcg_const_tl(dc->pc + 12));
2847 dc->jmp_pc = dc->pc + simm;
2848 cris_prepare_jmp(dc, JMP_DIRECT);
2849 return 6;
2852 static unsigned int dec_rfe_etc(DisasContext *dc)
2854 cris_cc_mask(dc, 0);
2856 if (dc->op2 == 15) {
2857 t_gen_mov_env_TN(halted, tcg_const_tl(1));
2858 tcg_gen_movi_tl(env_pc, dc->pc + 2);
2859 t_gen_raise_exception(EXCP_HLT);
2860 return 2;
2863 switch (dc->op2 & 7) {
2864 case 2:
2865 /* rfe. */
2866 LOG_DIS("rfe\n");
2867 cris_evaluate_flags(dc);
2868 gen_helper_rfe();
2869 dc->is_jmp = DISAS_UPDATE;
2870 break;
2871 case 5:
2872 /* rfn. */
2873 LOG_DIS("rfn\n");
2874 cris_evaluate_flags(dc);
2875 gen_helper_rfn();
2876 dc->is_jmp = DISAS_UPDATE;
2877 break;
2878 case 6:
2879 LOG_DIS("break %d\n", dc->op1);
2880 cris_evaluate_flags (dc);
2881 /* break. */
2882 tcg_gen_movi_tl(env_pc, dc->pc + 2);
2884 /* Breaks start at 16 in the exception vector. */
2885 t_gen_mov_env_TN(trap_vector,
2886 tcg_const_tl(dc->op1 + 16));
2887 t_gen_raise_exception(EXCP_BREAK);
2888 dc->is_jmp = DISAS_UPDATE;
2889 break;
2890 default:
2891 printf ("op2=%x\n", dc->op2);
2892 BUG();
2893 break;
2896 return 2;
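/* The ftag/fidx cache-maintenance insns generate no code; the translator
   effectively treats them as nops. */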
2899 static unsigned int dec_ftag_fidx_d_m(DisasContext *dc)
2901 return 2;
2904 static unsigned int dec_ftag_fidx_i_m(DisasContext *dc)
2906 return 2;
2909 static unsigned int dec_null(DisasContext *dc)
2911 printf ("unknown insn pc=%x opc=%x op1=%x op2=%x\n",
2912 dc->pc, dc->opcode, dc->op1, dc->op2);
2913 fflush(NULL);
2914 BUG();
2915 return 2;
2918 static struct decoder_info {
2919 struct {
2920 uint32_t bits;
2921 uint32_t mask;
2923 unsigned int (*dec)(DisasContext *dc);
2924 } decinfo[] = {
2925 /* Order matters here. */
2926 {DEC_MOVEQ, dec_moveq},
2927 {DEC_BTSTQ, dec_btstq},
2928 {DEC_CMPQ, dec_cmpq},
2929 {DEC_ADDOQ, dec_addoq},
2930 {DEC_ADDQ, dec_addq},
2931 {DEC_SUBQ, dec_subq},
2932 {DEC_ANDQ, dec_andq},
2933 {DEC_ORQ, dec_orq},
2934 {DEC_ASRQ, dec_asrq},
2935 {DEC_LSLQ, dec_lslq},
2936 {DEC_LSRQ, dec_lsrq},
2937 {DEC_BCCQ, dec_bccq},
2939 {DEC_BCC_IM, dec_bcc_im},
2940 {DEC_JAS_IM, dec_jas_im},
2941 {DEC_JAS_R, dec_jas_r},
2942 {DEC_JASC_IM, dec_jasc_im},
2943 {DEC_JASC_R, dec_jasc_r},
2944 {DEC_BAS_IM, dec_bas_im},
2945 {DEC_BASC_IM, dec_basc_im},
2946 {DEC_JUMP_P, dec_jump_p},
2947 {DEC_LAPC_IM, dec_lapc_im},
2948 {DEC_LAPCQ, dec_lapcq},
2950 {DEC_RFE_ETC, dec_rfe_etc},
2951 {DEC_ADDC_MR, dec_addc_mr},
2953 {DEC_MOVE_MP, dec_move_mp},
2954 {DEC_MOVE_PM, dec_move_pm},
2955 {DEC_MOVEM_MR, dec_movem_mr},
2956 {DEC_MOVEM_RM, dec_movem_rm},
2957 {DEC_MOVE_PR, dec_move_pr},
2958 {DEC_SCC_R, dec_scc_r},
2959 {DEC_SETF, dec_setclrf},
2960 {DEC_CLEARF, dec_setclrf},
2962 {DEC_MOVE_SR, dec_move_sr},
2963 {DEC_MOVE_RP, dec_move_rp},
2964 {DEC_SWAP_R, dec_swap_r},
2965 {DEC_ABS_R, dec_abs_r},
2966 {DEC_LZ_R, dec_lz_r},
2967 {DEC_MOVE_RS, dec_move_rs},
2968 {DEC_BTST_R, dec_btst_r},
2969 {DEC_ADDC_R, dec_addc_r},
2971 {DEC_DSTEP_R, dec_dstep_r},
2972 {DEC_XOR_R, dec_xor_r},
2973 {DEC_MCP_R, dec_mcp_r},
2974 {DEC_CMP_R, dec_cmp_r},
2976 {DEC_ADDI_R, dec_addi_r},
2977 {DEC_ADDI_ACR, dec_addi_acr},
2979 {DEC_ADD_R, dec_add_r},
2980 {DEC_SUB_R, dec_sub_r},
2982 {DEC_ADDU_R, dec_addu_r},
2983 {DEC_ADDS_R, dec_adds_r},
2984 {DEC_SUBU_R, dec_subu_r},
2985 {DEC_SUBS_R, dec_subs_r},
2986 {DEC_LSL_R, dec_lsl_r},
2988 {DEC_AND_R, dec_and_r},
2989 {DEC_OR_R, dec_or_r},
2990 {DEC_BOUND_R, dec_bound_r},
2991 {DEC_ASR_R, dec_asr_r},
2992 {DEC_LSR_R, dec_lsr_r},
2994 {DEC_MOVU_R, dec_movu_r},
2995 {DEC_MOVS_R, dec_movs_r},
2996 {DEC_NEG_R, dec_neg_r},
2997 {DEC_MOVE_R, dec_move_r},
2999 {DEC_FTAG_FIDX_I_M, dec_ftag_fidx_i_m},
3000 {DEC_FTAG_FIDX_D_M, dec_ftag_fidx_d_m},
3002 {DEC_MULS_R, dec_muls_r},
3003 {DEC_MULU_R, dec_mulu_r},
3005 {DEC_ADDU_M, dec_addu_m},
3006 {DEC_ADDS_M, dec_adds_m},
3007 {DEC_SUBU_M, dec_subu_m},
3008 {DEC_SUBS_M, dec_subs_m},
3010 {DEC_CMPU_M, dec_cmpu_m},
3011 {DEC_CMPS_M, dec_cmps_m},
3012 {DEC_MOVU_M, dec_movu_m},
3013 {DEC_MOVS_M, dec_movs_m},
3015 {DEC_CMP_M, dec_cmp_m},
3016 {DEC_ADDO_M, dec_addo_m},
3017 {DEC_BOUND_M, dec_bound_m},
3018 {DEC_ADD_M, dec_add_m},
3019 {DEC_SUB_M, dec_sub_m},
3020 {DEC_AND_M, dec_and_m},
3021 {DEC_OR_M, dec_or_m},
3022 {DEC_MOVE_RM, dec_move_rm},
3023 {DEC_TEST_M, dec_test_m},
3024 {DEC_MOVE_MR, dec_move_mr},
3026 {{0, 0}, dec_null}
3029 static inline unsigned int
3030 cris_decoder(DisasContext *dc)
3032 unsigned int insn_len = 2;
3033 int i;
3035 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
3036 tcg_gen_debug_insn_start(dc->pc);
3038 /* Load a halfword into the instruction register. */
3039 dc->ir = lduw_code(dc->pc);
3041 /* Now decode it. */
3042 dc->opcode = EXTRACT_FIELD(dc->ir, 4, 11);
3043 dc->op1 = EXTRACT_FIELD(dc->ir, 0, 3);
3044 dc->op2 = EXTRACT_FIELD(dc->ir, 12, 15);
3045 dc->zsize = EXTRACT_FIELD(dc->ir, 4, 4);
3046 dc->zzsize = EXTRACT_FIELD(dc->ir, 4, 5);
3047 dc->postinc = EXTRACT_FIELD(dc->ir, 10, 10);
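/*
 * Field layout of the 16-bit instruction word, summarizing the
 * EXTRACT_FIELD calls above (not taken from the CRIS manual):
 *
 *  15      12 11           4 3        0
 * +----------+--------------+----------+
 * |   op2    |    opcode    |   op1    |
 * +----------+--------------+----------+
 *
 * zsize (bit 4), zzsize (bits 4-5) and postinc (bit 10) overlap the
 * opcode field.
 */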
3049 /* Scan the decoder table; the first matching entry decodes the insn. */
3050 for (i = 0; i < ARRAY_SIZE(decinfo); i++) {
3051 if ((dc->opcode & decinfo[i].mask) == decinfo[i].bits)
3053 insn_len = decinfo[i].dec(dc);
3054 break;
3058 #if !defined(CONFIG_USER_ONLY)
3059 /* Single-stepping ? */
3060 if (dc->tb_flags & S_FLAG) {
3061 int l1;
3063 l1 = gen_new_label();
3064 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_PR[PR_SPC], dc->pc, l1);
3065 /* We treat SPC as a break with an odd trap vector. */
3066 cris_evaluate_flags (dc);
3067 t_gen_mov_env_TN(trap_vector, tcg_const_tl(3));
3068 tcg_gen_movi_tl(env_pc, dc->pc + insn_len);
3069 tcg_gen_movi_tl(cpu_PR[PR_SPC], dc->pc + insn_len);
3070 t_gen_raise_exception(EXCP_BREAK);
3071 gen_set_label(l1);
3073 #endif
3074 return insn_len;
3077 static void check_breakpoint(CPUState *env, DisasContext *dc)
3079 CPUBreakpoint *bp;
3081 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
3082 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
3083 if (bp->pc == dc->pc) {
3084 cris_evaluate_flags (dc);
3085 tcg_gen_movi_tl(env_pc, dc->pc);
3086 t_gen_raise_exception(EXCP_DEBUG);
3087 dc->is_jmp = DISAS_UPDATE;
3095  * Delay slots on QEMU/CRIS.
3097  * If an exception hits on a delay slot, the core will let ERP (the Exception
3098  * Return Pointer) point to the branch (the previous) insn and set the lsb
3099  * to give SW a hint that the exception actually hit on the dslot.
3101  * CRIS expects all PC addresses to be 16-bit aligned. The lsb is ignored by
3102  * the core and any jmp to an odd address will mask off that lsb. It is
3103  * simply there to let SW know there was an exception on a dslot.
3105  * When the software returns from an exception, the branch will re-execute.
3106  * On QEMU, care needs to be taken when a branch+delayslot sequence is broken
3107  * and the branch and delayslot don't share pages.
3109  * The TB containing the branch insn will set up env->btarget and evaluate
3110  * env->btaken. When the translation loop exits we will note that the branch
3111  * sequence is broken and let env->dslot be the size of the branch insn (those
3112  * vary in length).
3114  * The TB containing the delayslot will have the PC of its real insn (i.e. no
3115  * lsb set). It will also expect to have env->dslot set up with the size of
3116  * the branch insn so that env->pc - env->dslot points to the branch insn.
3117  * This TB will execute the dslot and take the branch, either to btarget or
3118  * just one insn ahead.
3120  * When exceptions occur, we check env->dslot in do_interrupt to detect
3121  * broken branch sequences and set up $erp accordingly (i.e. let it point to
3122  * the branch and set the lsb). Then env->dslot gets cleared so that the
3123  * exception handler can enter. When returning from exceptions (jump $erp)
3124  * the lsb gets masked off and we will re-execute the branch insn.
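 *
 * Illustrative example (addresses made up, insn sizes assumed): a 4-byte
 * branch at 0x1ffc with its 2-byte delay slot at 0x2000 gets split across
 * two TBs when 0x2000 starts a new page. The first TB ends after the
 * branch with btarget/btaken evaluated and env->dslot = 4 (the branch
 * insn's size). The second TB translates the dslot insn at
 * env->pc = 0x2000; if an exception hits there, do_interrupt sees
 * env->dslot != 0, points $erp at env->pc - env->dslot = 0x1ffc with the
 * lsb set (0x1ffd) and clears env->dslot. A later jump $erp masks off
 * the lsb and re-executes the branch at 0x1ffc.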
3128 /* generate intermediate code for basic block 'tb'. */
3129 static void
3130 gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
3131 int search_pc)
3133 uint16_t *gen_opc_end;
3134 uint32_t pc_start;
3135 unsigned int insn_len;
3136 int j, lj;
3137 struct DisasContext ctx;
3138 struct DisasContext *dc = &ctx;
3139 uint32_t next_page_start;
3140 target_ulong npc;
3141 int num_insns;
3142 int max_insns;
3144 qemu_log_try_set_file(stderr);
3146 /* Odd PC indicates that the branch is re-executing due to an exception in
3147 * the delay slot, like in real hw.
3149 pc_start = tb->pc & ~1;
3150 dc->env = env;
3151 dc->tb = tb;
3153 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
3155 dc->is_jmp = DISAS_NEXT;
3156 dc->ppc = pc_start;
3157 dc->pc = pc_start;
3158 dc->singlestep_enabled = env->singlestep_enabled;
3159 dc->flags_uptodate = 1;
3160 dc->flagx_known = 1;
3161 dc->flags_x = tb->flags & X_FLAG;
3162 dc->cc_x_uptodate = 0;
3163 dc->cc_mask = 0;
3164 dc->update_cc = 0;
3166 cris_update_cc_op(dc, CC_OP_FLAGS, 4);
3167 dc->cc_size_uptodate = -1;
3169 /* Decode TB flags. */
3170 dc->tb_flags = tb->flags & (S_FLAG | P_FLAG | U_FLAG | X_FLAG);
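/*
 * The low bits of tb->flags (tb->flags & 7) appear to carry the pending
 * delay-slot state described in the notes above; non-zero means this TB
 * starts inside a branch's delay slot.
 */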
3171 dc->delayed_branch = !!(tb->flags & 7);
3172 if (dc->delayed_branch)
3173 dc->jmp = JMP_INDIRECT;
3174 else
3175 dc->jmp = JMP_NOJMP;
3177 dc->cpustate_changed = 0;
3179 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
3180 qemu_log(
3181 "srch=%d pc=%x %x flg=%llx bt=%x ds=%u ccs=%x\n"
3182 "pid=%x usp=%x\n"
3183 "%x.%x.%x.%x\n"
3184 "%x.%x.%x.%x\n"
3185 "%x.%x.%x.%x\n"
3186 "%x.%x.%x.%x\n",
3187 search_pc, dc->pc, dc->ppc,
3188 (unsigned long long)tb->flags,
3189 env->btarget, (unsigned)tb->flags & 7,
3190 env->pregs[PR_CCS],
3191 env->pregs[PR_PID], env->pregs[PR_USP],
3192 env->regs[0], env->regs[1], env->regs[2], env->regs[3],
3193 env->regs[4], env->regs[5], env->regs[6], env->regs[7],
3194 env->regs[8], env->regs[9],
3195 env->regs[10], env->regs[11],
3196 env->regs[12], env->regs[13],
3197 env->regs[14], env->regs[15]);
3198 qemu_log("--------------\n");
3199 qemu_log("IN: %s\n", lookup_symbol(pc_start));
3202 next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
3203 lj = -1;
3204 num_insns = 0;
3205 max_insns = tb->cflags & CF_COUNT_MASK;
3206 if (max_insns == 0)
3207 max_insns = CF_COUNT_MASK;
3209 gen_icount_start();
3212 check_breakpoint(env, dc);
3214 if (search_pc) {
3215 j = gen_opc_ptr - gen_opc_buf;
3216 if (lj < j) {
3217 lj++;
3218 while (lj < j)
3219 gen_opc_instr_start[lj++] = 0;
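/* For an insn in a delay slot, record the branch PC with the lsb set
   so a PC restore re-executes the branch (see the delay-slot notes
   above). */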
3221 if (dc->delayed_branch == 1)
3222 gen_opc_pc[lj] = dc->ppc | 1;
3223 else
3224 gen_opc_pc[lj] = dc->pc;
3225 gen_opc_instr_start[lj] = 1;
3226 gen_opc_icount[lj] = num_insns;
3229 /* Pretty disas. */
3230 LOG_DIS("%8.8x:\t", dc->pc);
3232 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
3233 gen_io_start();
3234 dc->clear_x = 1;
3236 insn_len = cris_decoder(dc);
3237 dc->ppc = dc->pc;
3238 dc->pc += insn_len;
3239 if (dc->clear_x)
3240 cris_clear_x_flag(dc);
3242 num_insns++;
3243 /* Check for delayed branches here. If we do it before
3244 actually generating any host code, the simulator will just
3245 loop doing nothing on this program location. */
3246 if (dc->delayed_branch) {
3247 dc->delayed_branch--;
3248 if (dc->delayed_branch == 0)
3250 if (tb->flags & 7)
3251 t_gen_mov_env_TN(dslot,
3252 tcg_const_tl(0));
3253 if (dc->jmp == JMP_DIRECT) {
3254 dc->is_jmp = DISAS_NEXT;
3255 } else {
3256 t_gen_cc_jmp(env_btarget,
3257 tcg_const_tl(dc->pc));
3258 dc->is_jmp = DISAS_JUMP;
3260 break;
3264 /* If we are re-executing a branch due to an exception in a
3265 delay slot, don't break. */
3266 if (!(tb->pc & 1) && env->singlestep_enabled)
3267 break;
3268 } while (!dc->is_jmp && !dc->cpustate_changed
3269 && gen_opc_ptr < gen_opc_end
3270 && !singlestep
3271 && (dc->pc < next_page_start)
3272 && num_insns < max_insns);
3274 npc = dc->pc;
3275 if (dc->jmp == JMP_DIRECT && !dc->delayed_branch)
3276 npc = dc->jmp_pc;
3278 if (tb->cflags & CF_LAST_IO)
3279 gen_io_end();
3280 /* Force an update if the per-tb cpu state has changed. */
3281 if (dc->is_jmp == DISAS_NEXT
3282 && (dc->cpustate_changed || !dc->flagx_known
3283 || (dc->flags_x != (tb->flags & X_FLAG)))) {
3284 dc->is_jmp = DISAS_UPDATE;
3285 tcg_gen_movi_tl(env_pc, npc);
3287 /* Broken branch+delayslot sequence. */
3288 if (dc->delayed_branch == 1) {
3289 /* Set env->dslot to the size of the branch insn. */
3290 t_gen_mov_env_TN(dslot, tcg_const_tl(dc->pc - dc->ppc));
3291 cris_store_direct_jmp(dc);
3294 cris_evaluate_flags (dc);
3296 if (unlikely(env->singlestep_enabled)) {
3297 if (dc->is_jmp == DISAS_NEXT)
3298 tcg_gen_movi_tl(env_pc, npc);
3299 t_gen_raise_exception(EXCP_DEBUG);
3300 } else {
3301 switch(dc->is_jmp) {
3302 case DISAS_NEXT:
3303 gen_goto_tb(dc, 1, npc);
3304 break;
3305 default:
3306 case DISAS_JUMP:
3307 case DISAS_UPDATE:
3308 /* indicate that the hash table must be used
3309 to find the next TB */
3310 tcg_gen_exit_tb(0);
3311 break;
3312 case DISAS_SWI:
3313 case DISAS_TB_JUMP:
3314 /* nothing more to generate */
3315 break;
3318 gen_icount_end(tb, num_insns);
3319 *gen_opc_ptr = INDEX_op_end;
3320 if (search_pc) {
3321 j = gen_opc_ptr - gen_opc_buf;
3322 lj++;
3323 while (lj <= j)
3324 gen_opc_instr_start[lj++] = 0;
3325 } else {
3326 tb->size = dc->pc - pc_start;
3327 tb->icount = num_insns;
3330 #ifdef DEBUG_DISAS
3331 #if !DISAS_CRIS
3332 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
3333 log_target_disas(pc_start, dc->pc - pc_start, 0);
3334 qemu_log("\nisize=%d osize=%zd\n",
3335 dc->pc - pc_start, gen_opc_ptr - gen_opc_buf);
3337 #endif
3338 #endif
3341 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
3343 gen_intermediate_code_internal(env, tb, 0);
3346 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
3348 gen_intermediate_code_internal(env, tb, 1);
3351 void cpu_dump_state (CPUState *env, FILE *f,
3352 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
3353 int flags)
3355 int i;
3356 uint32_t srs;
3358 if (!env || !f)
3359 return;
3361 cpu_fprintf(f, "PC=%x CCS=%x btaken=%d btarget=%x\n"
3362 "cc_op=%d cc_src=%d cc_dest=%d cc_result=%x cc_mask=%x\n",
3363 env->pc, env->pregs[PR_CCS], env->btaken, env->btarget,
3364 env->cc_op,
3365 env->cc_src, env->cc_dest, env->cc_result, env->cc_mask);
3368 for (i = 0; i < 16; i++) {
3369 cpu_fprintf(f, "r%2.2d=%8.8x ", i, env->regs[i]);
3370 if ((i + 1) % 4 == 0)
3371 cpu_fprintf(f, "\n");
3373 cpu_fprintf(f, "\nspecial regs:\n");
3374 for (i = 0; i < 16; i++) {
3375 cpu_fprintf(f, "p%2.2d=%8.8x ", i, env->pregs[i]);
3376 if ((i + 1) % 4 == 0)
3377 cpu_fprintf(f, "\n");
3379 srs = env->pregs[PR_SRS];
3380 cpu_fprintf(f, "\nsupport function regs bank %x:\n", srs);
3381 if (srs < 256) {
3382 for (i = 0; i < 16; i++) {
3383 cpu_fprintf(f, "s%2.2d=%8.8x ",
3384 i, env->sregs[srs][i]);
3385 if ((i + 1) % 4 == 0)
3386 cpu_fprintf(f, "\n");
3389 cpu_fprintf(f, "\n\n");
3393 CPUCRISState *cpu_cris_init (const char *cpu_model)
3395 CPUCRISState *env;
3396 static int tcg_initialized = 0;
3397 int i;
3399 env = qemu_mallocz(sizeof(CPUCRISState));
3401 cpu_exec_init(env);
3402 cpu_reset(env);
3403 qemu_init_vcpu(env);
3405 if (tcg_initialized)
3406 return env;
3408 tcg_initialized = 1;
3410 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
3411 cc_x = tcg_global_mem_new(TCG_AREG0,
3412 offsetof(CPUState, cc_x), "cc_x");
3413 cc_src = tcg_global_mem_new(TCG_AREG0,
3414 offsetof(CPUState, cc_src), "cc_src");
3415 cc_dest = tcg_global_mem_new(TCG_AREG0,
3416 offsetof(CPUState, cc_dest),
3417 "cc_dest");
3418 cc_result = tcg_global_mem_new(TCG_AREG0,
3419 offsetof(CPUState, cc_result),
3420 "cc_result");
3421 cc_op = tcg_global_mem_new(TCG_AREG0,
3422 offsetof(CPUState, cc_op), "cc_op");
3423 cc_size = tcg_global_mem_new(TCG_AREG0,
3424 offsetof(CPUState, cc_size),
3425 "cc_size");
3426 cc_mask = tcg_global_mem_new(TCG_AREG0,
3427 offsetof(CPUState, cc_mask),
3428 "cc_mask");
3430 env_pc = tcg_global_mem_new(TCG_AREG0,
3431 offsetof(CPUState, pc),
3432 "pc");
3433 env_btarget = tcg_global_mem_new(TCG_AREG0,
3434 offsetof(CPUState, btarget),
3435 "btarget");
3436 env_btaken = tcg_global_mem_new(TCG_AREG0,
3437 offsetof(CPUState, btaken),
3438 "btaken");
3439 for (i = 0; i < 16; i++) {
3440 cpu_R[i] = tcg_global_mem_new(TCG_AREG0,
3441 offsetof(CPUState, regs[i]),
3442 regnames[i]);
3444 for (i = 0; i < 16; i++) {
3445 cpu_PR[i] = tcg_global_mem_new(TCG_AREG0,
3446 offsetof(CPUState, pregs[i]),
3447 pregnames[i]);
3450 #define GEN_HELPER 2
3451 #include "helper.h"
3453 return env;
3456 void cpu_reset (CPUCRISState *env)
3458 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
3459 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
3460 log_cpu_state(env, 0);
3463 memset(env, 0, offsetof(CPUCRISState, breakpoints));
3464 tlb_flush(env, 1);
3466 env->pregs[PR_VR] = 32;
3467 #if defined(CONFIG_USER_ONLY)
3468 /* start in user mode with interrupts enabled. */
3469 env->pregs[PR_CCS] |= U_FLAG | I_FLAG;
3470 #else
3471 cris_mmu_init(env);
3472 env->pregs[PR_CCS] = 0;
3473 #endif
3476 void gen_pc_load(CPUState *env, struct TranslationBlock *tb,
3477 unsigned long searched_pc, int pc_pos, void *puc)
3479 env->pc = gen_opc_pc[pc_pos];