 * Copyright (c) 2011-2012 Jia Liu <proljc@gmail.com>
 *                         Feng Gao <gf91597@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
#include "qemu-common.h"

#define OPENRISC_DISAS

#ifdef OPENRISC_DISAS
# define LOG_DIS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DIS(...) do { } while (0)
#endif
typedef struct DisasContext {
    TranslationBlock *tb;
    target_ulong pc, ppc, npc;
    uint32_t tb_flags, synced_flags, flags;
    uint32_t is_jmp;
    uint32_t mem_idx;
    int singlestep_enabled;
    uint32_t delayed_branch;
} DisasContext;

static TCGv_ptr cpu_env;
static TCGv cpu_sr;
static TCGv cpu_R[32];
static TCGv cpu_pc;
static TCGv jmp_pc;            /* l.jr/l.jalr temp pc */
static TCGv cpu_npc;
static TCGv cpu_ppc;
static TCGv_i32 env_btaken;    /* bf/bnf, F flag taken */
static TCGv_i32 fpcsr;
static TCGv machi, maclo;
static TCGv fpmaddhi, fpmaddlo;
static TCGv_i32 env_flags;
#include "gen-icount.h"
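/*
 * Added note (commentary only, not from the original sources): the statics
 * above are TCG globals that mirror fields of CPUOpenRISCState (sr, pc,
 * npc, ppc, jmp_pc, btaken, fpcsr, the MAC pair and the GPR file), so the
 * generated code can read and write guest CPU state directly.  They are
 * created in openrisc_translate_init() below.
 */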
void openrisc_translate_init(void)
{
    static const char * const regnames[] = {
        "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
        "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
        "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
        "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
    };
    int i;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    cpu_sr = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUOpenRISCState, sr), "sr");
    env_flags = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUOpenRISCState, flags),
                                       "flags");
    cpu_pc = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUOpenRISCState, pc), "pc");
    cpu_npc = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUOpenRISCState, npc), "npc");
    cpu_ppc = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUOpenRISCState, ppc), "ppc");
    jmp_pc = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUOpenRISCState, jmp_pc), "jmp_pc");
    env_btaken = tcg_global_mem_new_i32(TCG_AREG0,
                                        offsetof(CPUOpenRISCState, btaken),
                                        "btaken");
    fpcsr = tcg_global_mem_new_i32(TCG_AREG0,
                                   offsetof(CPUOpenRISCState, fpcsr),
                                   "fpcsr");
    machi = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUOpenRISCState, machi),
                               "machi");
    maclo = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUOpenRISCState, maclo),
                               "maclo");
    fpmaddhi = tcg_global_mem_new(TCG_AREG0,
                                  offsetof(CPUOpenRISCState, fpmaddhi),
                                  "fpmaddhi");
    fpmaddlo = tcg_global_mem_new(TCG_AREG0,
                                  offsetof(CPUOpenRISCState, fpmaddlo),
                                  "fpmaddlo");
    for (i = 0; i < 32; i++) {
        cpu_R[i] = tcg_global_mem_new(TCG_AREG0,
                                      offsetof(CPUOpenRISCState, gpr[i]),
                                      regnames[i]);
    }
}
/* Writeback SR_F translation-space to execution-space. */
static inline void wb_SR_F(void)
{
    int label;

    label = gen_new_label();
    tcg_gen_andi_tl(cpu_sr, cpu_sr, ~SR_F);
    tcg_gen_brcondi_tl(TCG_COND_EQ, env_btaken, 0, label);
    tcg_gen_ori_tl(cpu_sr, cpu_sr, SR_F);
    gen_set_label(label);
}
static inline int zero_extend(unsigned int val, int width)
{
    return val & ((1 << width) - 1);
}

static inline int sign_extend(unsigned int val, int width)
{
    int sval;

    /* LSL */
    val <<= TARGET_LONG_BITS - width;
    sval = val;
    /* ASR */
    sval >>= TARGET_LONG_BITS - width;
    return sval;
}
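/*
 * Added worked example (commentary only): sign_extend(0xFFFF, 16) shifts
 * the value into the top 16 bits (0xFFFF0000) and arithmetic-shifts it
 * back down, yielding -1; zero_extend(0x1FF, 8) masks to the low 8 bits,
 * yielding 0xFF.  The -1 result assumes TARGET_LONG_BITS is 32, as it is
 * for or32.
 */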
static inline void gen_sync_flags(DisasContext *dc)
{
    /* Sync the tb dependent flag between translate and runtime. */
    if (dc->tb_flags != dc->synced_flags) {
        tcg_gen_movi_tl(env_flags, dc->tb_flags);
        dc->synced_flags = dc->tb_flags;
    }
}
static void gen_exception(DisasContext *dc, unsigned int excp)
{
    TCGv_i32 tmp = tcg_const_i32(excp);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
}
static void gen_illegal_exception(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    gen_exception(dc, EXCP_ILLEGAL);
    dc->is_jmp = DISAS_UPDATE;
}
/* not used yet, open it when we need or64.  */
/*#ifdef TARGET_OPENRISC64
static void check_ob64s(DisasContext *dc)
{
    if (!(dc->flags & CPUCFGR_OB64S)) {
        gen_illegal_exception(dc);
    }
}

static void check_of64s(DisasContext *dc)
{
    if (!(dc->flags & CPUCFGR_OF64S)) {
        gen_illegal_exception(dc);
    }
}

static void check_ov64s(DisasContext *dc)
{
    if (!(dc->flags & CPUCFGR_OV64S)) {
        gen_illegal_exception(dc);
    }
}
#endif*/
static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
{
    TranslationBlock *tb;
    tb = dc->tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
                                       likely(!dc->singlestep_enabled)) {
        tcg_gen_movi_tl(cpu_pc, dest);
        tcg_gen_goto_tb(n);
        tcg_gen_exit_tb((tcg_target_long)tb + n);
    } else {
        tcg_gen_movi_tl(cpu_pc, dest);
        if (dc->singlestep_enabled) {
            gen_exception(dc, EXCP_DEBUG);
        }
        tcg_gen_exit_tb(0);
    }
}
static void gen_jump(DisasContext *dc, uint32_t imm, uint32_t reg, uint32_t op0)
{
    target_ulong tmp_pc;
    int lab = gen_new_label();
    TCGv sr_f = tcg_temp_new();
    /* N26, 26bits imm */
    tmp_pc = sign_extend((imm<<2), 26) + dc->pc;
    tcg_gen_andi_tl(sr_f, cpu_sr, SR_F);

    if (op0 == 0x00) {           /* l.j */
        tcg_gen_movi_tl(jmp_pc, tmp_pc);
    } else if (op0 == 0x01) {    /* l.jal */
        tcg_gen_movi_tl(cpu_R[9], (dc->pc + 8));
        tcg_gen_movi_tl(jmp_pc, tmp_pc);
    } else if (op0 == 0x03) {    /* l.bnf */
        tcg_gen_movi_tl(jmp_pc, dc->pc + 8);
        tcg_gen_brcondi_i32(TCG_COND_EQ, sr_f, SR_F, lab);
        tcg_gen_movi_tl(jmp_pc, tmp_pc);
        gen_set_label(lab);
    } else if (op0 == 0x04) {    /* l.bf */
        tcg_gen_movi_tl(jmp_pc, dc->pc + 8);
        tcg_gen_brcondi_i32(TCG_COND_NE, sr_f, SR_F, lab);
        tcg_gen_movi_tl(jmp_pc, tmp_pc);
        gen_set_label(lab);
    } else if (op0 == 0x11) {    /* l.jr */
        tcg_gen_mov_tl(jmp_pc, cpu_R[reg]);
    } else if (op0 == 0x12) {    /* l.jalr */
        tcg_gen_movi_tl(cpu_R[9], (dc->pc + 8));
        tcg_gen_mov_tl(jmp_pc, cpu_R[reg]);
    } else {
        gen_illegal_exception(dc);
    }

    dc->delayed_branch = 2;
    dc->tb_flags |= D_FLAG;
    gen_sync_flags(dc);
}
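/*
 * Added note (commentary only): OpenRISC branches have a single delay
 * slot.  gen_jump() only records the target in jmp_pc and arms the
 * countdown: delayed_branch is set to 2 and D_FLAG is raised; the main
 * loop in gen_intermediate_code_internal() decrements the counter once
 * per translated instruction and, once it reaches zero (after the delay
 * slot), copies jmp_pc into cpu_pc/cpu_npc and ends the TB with
 * DISAS_JUMP.
 */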
static void dec_calc(DisasContext *dc, uint32_t insn)
{
    uint32_t op0, op1, op2;
    uint32_t ra, rb, rd;
    op0 = extract32(insn, 0, 4);
    op1 = extract32(insn, 8, 2);
    op2 = extract32(insn, 6, 2);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);
    rd = extract32(insn, 21, 5);
    case 0x00: /* l.add */
        LOG_DIS("l.add r%d, r%d, r%d\n", rd, ra, rb);
        {
            int lab = gen_new_label();
            TCGv_i64 ta = tcg_temp_new_i64();
            TCGv_i64 tb = tcg_temp_new_i64();
            TCGv_i64 td = tcg_temp_local_new_i64();
            TCGv_i32 res = tcg_temp_local_new_i32();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
            tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
            tcg_gen_add_i64(td, ta, tb);
            tcg_gen_trunc_i64_i32(res, td);
            tcg_gen_shri_i64(td, td, 31);
            tcg_gen_andi_i64(td, td, 0x3);
            /* Jump to lab when no overflow. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
            tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);
            gen_set_label(lab);
            tcg_gen_mov_i32(cpu_R[rd], res);
            tcg_temp_free_i64(ta);
            tcg_temp_free_i64(tb);
            tcg_temp_free_i64(td);
            tcg_temp_free_i32(res);
            tcg_temp_free_i32(sr_ove);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
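    /*
     * Added note on the pattern above (commentary only): for l.add the two
     * 32-bit operands are zero-extended and added as 64-bit values, so
     * after the shift by 31 and the mask with 0x3, td holds the sign bit
     * of the 32-bit result (bit 31) and the carry out of it (bit 32).
     * The values 0x0 and 0x3 are treated as "no overflow"; anything else
     * sets SR_OV | SR_CY and, if SR_OVE is enabled, raises EXCP_RANGE.
     * l.addc, l.sub, l.addi and l.addic below reuse the same skeleton
     * with small variations.
     */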
    case 0x0001: /* l.addc */
        LOG_DIS("l.addc r%d, r%d, r%d\n", rd, ra, rb);
        {
            int lab = gen_new_label();
            TCGv_i64 ta = tcg_temp_new_i64();
            TCGv_i64 tb = tcg_temp_new_i64();
            TCGv_i64 tcy = tcg_temp_local_new_i64();
            TCGv_i64 td = tcg_temp_local_new_i64();
            TCGv_i32 res = tcg_temp_local_new_i32();
            TCGv_i32 sr_cy = tcg_temp_local_new_i32();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
            tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
            tcg_gen_andi_i32(sr_cy, cpu_sr, SR_CY);
            tcg_gen_extu_i32_i64(tcy, sr_cy);
            tcg_gen_shri_i64(tcy, tcy, 10);
            tcg_gen_add_i64(td, ta, tb);
            tcg_gen_add_i64(td, td, tcy);
            tcg_gen_trunc_i64_i32(res, td);
            tcg_gen_shri_i64(td, td, 32);
            tcg_gen_andi_i64(td, td, 0x3);
            /* Jump to lab when no overflow. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
            tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);
            gen_set_label(lab);
            tcg_gen_mov_i32(cpu_R[rd], res);
            tcg_temp_free_i64(ta);
            tcg_temp_free_i64(tb);
            tcg_temp_free_i64(tcy);
            tcg_temp_free_i64(td);
            tcg_temp_free_i32(res);
            tcg_temp_free_i32(sr_cy);
            tcg_temp_free_i32(sr_ove);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x0002: /* l.sub */
        LOG_DIS("l.sub r%d, r%d, r%d\n", rd, ra, rb);
        {
            int lab = gen_new_label();
            TCGv_i64 ta = tcg_temp_new_i64();
            TCGv_i64 tb = tcg_temp_new_i64();
            TCGv_i64 td = tcg_temp_local_new_i64();
            TCGv_i32 res = tcg_temp_local_new_i32();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();

            tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
            tcg_gen_extu_i32_i64(tb, cpu_R[rb]);
            tcg_gen_sub_i64(td, ta, tb);
            tcg_gen_trunc_i64_i32(res, td);
            tcg_gen_shri_i64(td, td, 31);
            tcg_gen_andi_i64(td, td, 0x3);
            /* Jump to lab when no overflow. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
            tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);
            gen_set_label(lab);
            tcg_gen_mov_i32(cpu_R[rd], res);
            tcg_temp_free_i64(ta);
            tcg_temp_free_i64(tb);
            tcg_temp_free_i64(td);
            tcg_temp_free_i32(res);
            tcg_temp_free_i32(sr_ove);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x0003: /* l.and */
        LOG_DIS("l.and r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_and_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x0004: /* l.or */
        LOG_DIS("l.or r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_or_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x00: /* l.xor */
        LOG_DIS("l.xor r%d, r%d, r%d\n", rd, ra, rb);
        tcg_gen_xor_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x03: /* l.mul */
        LOG_DIS("l.mul r%d, r%d, r%d\n", rd, ra, rb);
        if (ra != 0 && rb != 0) {
            gen_helper_mul32(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        } else {
            tcg_gen_movi_tl(cpu_R[rd], 0x0);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x03: /* l.div */
        LOG_DIS("l.div r%d, r%d, r%d\n", rd, ra, rb);
        {
            int lab0 = gen_new_label();
            int lab1 = gen_new_label();
            int lab2 = gen_new_label();
            int lab3 = gen_new_label();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            if (rb == 0) {
                tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
                tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
                tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab0);
                gen_exception(dc, EXCP_RANGE);
                gen_set_label(lab0);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_R[rb],
                                   0x00000000, lab1);
                tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[ra],
                                   0x80000000, lab2);
                tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[rb],
                                   0xffffffff, lab2);
                gen_set_label(lab1);
                tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
                tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
                tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab3);
                gen_exception(dc, EXCP_RANGE);
                gen_set_label(lab2);
                tcg_gen_div_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
                gen_set_label(lab3);
            }
            tcg_temp_free_i32(sr_ove);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x03: /* l.divu */
        LOG_DIS("l.divu r%d, r%d, r%d\n", rd, ra, rb);
        {
            int lab0 = gen_new_label();
            int lab1 = gen_new_label();
            int lab2 = gen_new_label();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            if (rb == 0) {
                tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
                tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
                tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab0);
                gen_exception(dc, EXCP_RANGE);
                gen_set_label(lab0);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_NE, cpu_R[rb],
                                   0x00000000, lab1);
                tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
                tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
                tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab2);
                gen_exception(dc, EXCP_RANGE);
                gen_set_label(lab1);
                tcg_gen_divu_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
                gen_set_label(lab2);
            }
            tcg_temp_free_i32(sr_ove);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x03: /* l.mulu */
        LOG_DIS("l.mulu r%d, r%d, r%d\n", rd, ra, rb);
        if (rb != 0 && ra != 0) {
            TCGv_i64 result = tcg_temp_local_new_i64();
            TCGv_i64 tra = tcg_temp_local_new_i64();
            TCGv_i64 trb = tcg_temp_local_new_i64();
            TCGv_i64 high = tcg_temp_new_i64();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            int lab = gen_new_label();
            /* Calculate each result. */
            tcg_gen_extu_i32_i64(tra, cpu_R[ra]);
            tcg_gen_extu_i32_i64(trb, cpu_R[rb]);
            tcg_gen_mul_i64(result, tra, trb);
            tcg_temp_free_i64(tra);
            tcg_temp_free_i64(trb);
            tcg_gen_shri_i64(high, result, TARGET_LONG_BITS);
            /* Overflow or not. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, high, 0x00000000, lab);
            tcg_gen_ori_tl(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_tl(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_tl(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);
            gen_set_label(lab);
            tcg_temp_free_i64(high);
            tcg_gen_trunc_i64_tl(cpu_R[rd], result);
            tcg_temp_free_i64(result);
            tcg_temp_free_i32(sr_ove);
        } else {
            tcg_gen_movi_tl(cpu_R[rd], 0);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x00: /* l.cmov */
        LOG_DIS("l.cmov r%d, r%d, r%d\n", rd, ra, rb);
        {
            int lab = gen_new_label();
            TCGv res = tcg_temp_local_new();
            TCGv sr_f = tcg_temp_new();
            tcg_gen_andi_tl(sr_f, cpu_sr, SR_F);
            tcg_gen_mov_tl(res, cpu_R[rb]);
            tcg_gen_brcondi_tl(TCG_COND_NE, sr_f, SR_F, lab);
            tcg_gen_mov_tl(res, cpu_R[ra]);
            gen_set_label(lab);
            tcg_gen_mov_tl(cpu_R[rd], res);
            tcg_temp_free(sr_f);
            tcg_temp_free(res);
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x00: /* l.ff1 */
        LOG_DIS("l.ff1 r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_ff1(cpu_R[rd], cpu_R[ra]);
        break;
    case 0x01: /* l.fl1 */
        LOG_DIS("l.fl1 r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_fl1(cpu_R[rd], cpu_R[ra]);
        break;
    default:
        gen_illegal_exception(dc);
        break;
        case 0x00: /* l.sll */
            LOG_DIS("l.sll r%d, r%d, r%d\n", rd, ra, rb);
            tcg_gen_shl_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
            break;
        case 0x01: /* l.srl */
            LOG_DIS("l.srl r%d, r%d, r%d\n", rd, ra, rb);
            tcg_gen_shr_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
            break;
        case 0x02: /* l.sra */
            LOG_DIS("l.sra r%d, r%d, r%d\n", rd, ra, rb);
            tcg_gen_sar_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
            break;
        case 0x03: /* l.ror */
            LOG_DIS("l.ror r%d, r%d, r%d\n", rd, ra, rb);
            tcg_gen_rotr_tl(cpu_R[rd], cpu_R[ra], cpu_R[rb]);
            break;
        default:
            gen_illegal_exception(dc);
            break;
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
        case 0x00: /* l.exths */
            LOG_DIS("l.exths r%d, r%d\n", rd, ra);
            tcg_gen_ext16s_tl(cpu_R[rd], cpu_R[ra]);
            break;
        case 0x01: /* l.extbs */
            LOG_DIS("l.extbs r%d, r%d\n", rd, ra);
            tcg_gen_ext8s_tl(cpu_R[rd], cpu_R[ra]);
            break;
        case 0x02: /* l.exthz */
            LOG_DIS("l.exthz r%d, r%d\n", rd, ra);
            tcg_gen_ext16u_tl(cpu_R[rd], cpu_R[ra]);
            break;
        case 0x03: /* l.extbz */
            LOG_DIS("l.extbz r%d, r%d\n", rd, ra);
            tcg_gen_ext8u_tl(cpu_R[rd], cpu_R[ra]);
            break;
        default:
            gen_illegal_exception(dc);
            break;
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
        case 0x00: /* l.extws */
            LOG_DIS("l.extws r%d, r%d\n", rd, ra);
            tcg_gen_ext32s_tl(cpu_R[rd], cpu_R[ra]);
            break;
        case 0x01: /* l.extwz */
            LOG_DIS("l.extwz r%d, r%d\n", rd, ra);
            tcg_gen_ext32u_tl(cpu_R[rd], cpu_R[ra]);
            break;
        default:
            gen_illegal_exception(dc);
            break;
        }
        break;
    default:
        gen_illegal_exception(dc);
        break;
    }
}
static void dec_misc(DisasContext *dc, uint32_t insn)
{
    uint32_t op0, op1;
    uint32_t ra, rb, rd;
#ifdef OPENRISC_DISAS
    uint32_t L6, K5;
#endif
    uint32_t I16, I5, I11, N26, tmp;
    op0 = extract32(insn, 26, 6);
    op1 = extract32(insn, 24, 2);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);
    rd = extract32(insn, 21, 5);
#ifdef OPENRISC_DISAS
    L6 = extract32(insn, 5, 6);
    K5 = extract32(insn, 0, 5);
#endif
    I16 = extract32(insn, 0, 16);
    I5 = extract32(insn, 21, 5);
    I11 = extract32(insn, 0, 11);
    N26 = extract32(insn, 0, 26);
    tmp = (I5<<11) + I11;
722 LOG_DIS("l.j %d\n", N26
);
723 gen_jump(dc
, N26
, 0, op0
);
726 case 0x01: /* l.jal */
727 LOG_DIS("l.jal %d\n", N26
);
728 gen_jump(dc
, N26
, 0, op0
);
731 case 0x03: /* l.bnf */
732 LOG_DIS("l.bnf %d\n", N26
);
733 gen_jump(dc
, N26
, 0, op0
);
736 case 0x04: /* l.bf */
737 LOG_DIS("l.bf %d\n", N26
);
738 gen_jump(dc
, N26
, 0, op0
);
    case 0x01: /* l.nop */
        LOG_DIS("l.nop %d\n", I16);
        break;
    default:
        gen_illegal_exception(dc);
        break;
    case 0x11: /* l.jr */
        LOG_DIS("l.jr r%d\n", rb);
        gen_jump(dc, 0, rb, op0);
        break;

    case 0x12: /* l.jalr */
        LOG_DIS("l.jalr r%d\n", rb);
        gen_jump(dc, 0, rb, op0);
        break;
    case 0x13: /* l.maci */
        LOG_DIS("l.maci %d, r%d, %d\n", I5, ra, I11);
        {
            TCGv_i64 t1 = tcg_temp_new_i64();
            TCGv_i64 t2 = tcg_temp_new_i64();
            TCGv_i32 dst = tcg_temp_new_i32();
            TCGv ttmp = tcg_const_tl(tmp);
            tcg_gen_mul_tl(dst, cpu_R[ra], ttmp);
            tcg_gen_ext_i32_i64(t1, dst);
            tcg_gen_concat_i32_i64(t2, maclo, machi);
            tcg_gen_add_i64(t2, t2, t1);
            tcg_gen_trunc_i64_i32(maclo, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_i32(machi, t2);
            tcg_temp_free_i32(dst);
            tcg_temp_free(ttmp);
            tcg_temp_free_i64(t1);
            tcg_temp_free_i64(t2);
        }
        break;
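    /*
     * Added note (commentary only): maclo/machi together form the 64-bit
     * MAC accumulator.  l.maci above (and l.mac/l.msb in dec_mac() below)
     * widen the 32-bit product to 64 bits, combine it with the
     * concatenated maclo:machi pair, then split the result back into the
     * two 32-bit halves.
     */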
    case 0x09: /* l.rfe */
        LOG_DIS("l.rfe\n");
        {
#if defined(CONFIG_USER_ONLY)
            return;
#else
            if (dc->mem_idx == MMU_USER_IDX) {
                gen_illegal_exception(dc);
                return;
            }
            gen_helper_rfe(cpu_env);
            dc->is_jmp = DISAS_UPDATE;
#endif
        }
        break;
    case 0x1c: /* l.cust1 */
        LOG_DIS("l.cust1\n");
        break;

    case 0x1d: /* l.cust2 */
        LOG_DIS("l.cust2\n");
        break;

    case 0x1e: /* l.cust3 */
        LOG_DIS("l.cust3\n");
        break;

    case 0x1f: /* l.cust4 */
        LOG_DIS("l.cust4\n");
        break;

    case 0x3c: /* l.cust5 */
        LOG_DIS("l.cust5 r%d, r%d, r%d, %d, %d\n", rd, ra, rb, L6, K5);
        break;

    case 0x3d: /* l.cust6 */
        LOG_DIS("l.cust6\n");
        break;

    case 0x3e: /* l.cust7 */
        LOG_DIS("l.cust7\n");
        break;

    case 0x3f: /* l.cust8 */
        LOG_DIS("l.cust8\n");
        break;
    /* not used yet, open it when we need or64.  */
    /*#ifdef TARGET_OPENRISC64
    case 0x20:     l.ld
        LOG_DIS("l.ld r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_addi_i64(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld64(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free_i64(t0);
        }
        break;
    #endif*/
    case 0x21: /* l.lwz */
        LOG_DIS("l.lwz r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld32u(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x22: /* l.lws */
        LOG_DIS("l.lws r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld32s(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x23: /* l.lbz */
        LOG_DIS("l.lbz r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld8u(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x24: /* l.lbs */
        LOG_DIS("l.lbs r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld8s(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x25: /* l.lhz */
        LOG_DIS("l.lhz r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld16u(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x26: /* l.lhs */
        LOG_DIS("l.lhs r%d, r%d, %d\n", rd, ra, I16);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(I16, 16));
            tcg_gen_qemu_ld16s(cpu_R[rd], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;
    case 0x27: /* l.addi */
        LOG_DIS("l.addi r%d, r%d, %d\n", rd, ra, I16);
        {
            int lab = gen_new_label();
            TCGv_i64 ta = tcg_temp_new_i64();
            TCGv_i64 td = tcg_temp_local_new_i64();
            TCGv_i32 res = tcg_temp_local_new_i32();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
            tcg_gen_addi_i64(td, ta, sign_extend(I16, 16));
            tcg_gen_trunc_i64_i32(res, td);
            tcg_gen_shri_i64(td, td, 32);
            tcg_gen_andi_i64(td, td, 0x3);
            /* Jump to lab when no overflow. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
            tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);
            gen_set_label(lab);
            tcg_gen_mov_i32(cpu_R[rd], res);
            tcg_temp_free_i64(ta);
            tcg_temp_free_i64(td);
            tcg_temp_free_i32(res);
            tcg_temp_free_i32(sr_ove);
        }
        break;
    case 0x28: /* l.addic */
        LOG_DIS("l.addic r%d, r%d, %d\n", rd, ra, I16);
        {
            int lab = gen_new_label();
            TCGv_i64 ta = tcg_temp_new_i64();
            TCGv_i64 td = tcg_temp_local_new_i64();
            TCGv_i64 tcy = tcg_temp_local_new_i64();
            TCGv_i32 res = tcg_temp_local_new_i32();
            TCGv_i32 sr_cy = tcg_temp_local_new_i32();
            TCGv_i32 sr_ove = tcg_temp_local_new_i32();
            tcg_gen_extu_i32_i64(ta, cpu_R[ra]);
            tcg_gen_andi_i32(sr_cy, cpu_sr, SR_CY);
            tcg_gen_shri_i32(sr_cy, sr_cy, 10);
            tcg_gen_extu_i32_i64(tcy, sr_cy);
            tcg_gen_addi_i64(td, ta, sign_extend(I16, 16));
            tcg_gen_add_i64(td, td, tcy);
            tcg_gen_trunc_i64_i32(res, td);
            tcg_gen_shri_i64(td, td, 32);
            tcg_gen_andi_i64(td, td, 0x3);
            /* Jump to lab when no overflow. */
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x0, lab);
            tcg_gen_brcondi_i64(TCG_COND_EQ, td, 0x3, lab);
            tcg_gen_ori_i32(cpu_sr, cpu_sr, (SR_OV | SR_CY));
            tcg_gen_andi_i32(sr_ove, cpu_sr, SR_OVE);
            tcg_gen_brcondi_i32(TCG_COND_NE, sr_ove, SR_OVE, lab);
            gen_exception(dc, EXCP_RANGE);
            gen_set_label(lab);
            tcg_gen_mov_i32(cpu_R[rd], res);
            tcg_temp_free_i64(ta);
            tcg_temp_free_i64(td);
            tcg_temp_free_i64(tcy);
            tcg_temp_free_i32(res);
            tcg_temp_free_i32(sr_cy);
            tcg_temp_free_i32(sr_ove);
        }
        break;
    case 0x29: /* l.andi */
        LOG_DIS("l.andi r%d, r%d, %d\n", rd, ra, I16);
        tcg_gen_andi_tl(cpu_R[rd], cpu_R[ra], zero_extend(I16, 16));
        break;

    case 0x2a: /* l.ori */
        LOG_DIS("l.ori r%d, r%d, %d\n", rd, ra, I16);
        tcg_gen_ori_tl(cpu_R[rd], cpu_R[ra], zero_extend(I16, 16));
        break;

    case 0x2b: /* l.xori */
        LOG_DIS("l.xori r%d, r%d, %d\n", rd, ra, I16);
        tcg_gen_xori_tl(cpu_R[rd], cpu_R[ra], sign_extend(I16, 16));
        break;
    case 0x2c: /* l.muli */
        LOG_DIS("l.muli r%d, r%d, %d\n", rd, ra, I16);
        if (ra != 0 && I16 != 0) {
            TCGv_i32 im = tcg_const_i32(I16);
            gen_helper_mul32(cpu_R[rd], cpu_env, cpu_R[ra], im);
            tcg_temp_free_i32(im);
        } else {
            tcg_gen_movi_tl(cpu_R[rd], 0x0);
        }
        break;
    case 0x2d: /* l.mfspr */
        LOG_DIS("l.mfspr r%d, r%d, %d\n", rd, ra, I16);
        {
#if defined(CONFIG_USER_ONLY)
            return;
#else
            TCGv_i32 ti = tcg_const_i32(I16);
            if (dc->mem_idx == MMU_USER_IDX) {
                gen_illegal_exception(dc);
                return;
            }
            gen_helper_mfspr(cpu_R[rd], cpu_env, cpu_R[rd], cpu_R[ra], ti);
            tcg_temp_free_i32(ti);
#endif
        }
        break;
    case 0x30: /* l.mtspr */
        LOG_DIS("l.mtspr %d, r%d, r%d, %d\n", I5, ra, rb, I11);
        {
#if defined(CONFIG_USER_ONLY)
            return;
#else
            TCGv_i32 im = tcg_const_i32(tmp);
            if (dc->mem_idx == MMU_USER_IDX) {
                gen_illegal_exception(dc);
                return;
            }
            gen_helper_mtspr(cpu_env, cpu_R[ra], cpu_R[rb], im);
            tcg_temp_free_i32(im);
#endif
        }
        break;
    /* not used yet, open it when we need or64.  */
    /*#ifdef TARGET_OPENRISC64
    case 0x34:     l.sd
        LOG_DIS("l.sd %d, r%d, r%d, %d\n", I5, ra, rb, I11);
        {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(tmp, 16));
            tcg_gen_qemu_st64(cpu_R[rb], t0, dc->mem_idx);
            tcg_temp_free_i64(t0);
        }
        break;
    #endif*/
    case 0x35: /* l.sw */
        LOG_DIS("l.sw %d, r%d, r%d, %d\n", I5, ra, rb, I11);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(tmp, 16));
            tcg_gen_qemu_st32(cpu_R[rb], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x36: /* l.sb */
        LOG_DIS("l.sb %d, r%d, r%d, %d\n", I5, ra, rb, I11);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(tmp, 16));
            tcg_gen_qemu_st8(cpu_R[rb], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    case 0x37: /* l.sh */
        LOG_DIS("l.sh %d, r%d, r%d, %d\n", I5, ra, rb, I11);
        {
            TCGv t0 = tcg_temp_new();
            tcg_gen_addi_tl(t0, cpu_R[ra], sign_extend(tmp, 16));
            tcg_gen_qemu_st16(cpu_R[rb], t0, dc->mem_idx);
            tcg_temp_free(t0);
        }
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
}
static void dec_mac(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
    uint32_t ra, rb;
    op0 = extract32(insn, 0, 4);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);

    switch (op0) {
    case 0x0001: /* l.mac */
        LOG_DIS("l.mac r%d, r%d\n", ra, rb);
        {
            TCGv_i32 t0 = tcg_temp_new_i32();
            TCGv_i64 t1 = tcg_temp_new_i64();
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_mul_tl(t0, cpu_R[ra], cpu_R[rb]);
            tcg_gen_ext_i32_i64(t1, t0);
            tcg_gen_concat_i32_i64(t2, maclo, machi);
            tcg_gen_add_i64(t2, t2, t1);
            tcg_gen_trunc_i64_i32(maclo, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_i32(machi, t2);
            tcg_temp_free_i32(t0);
            tcg_temp_free_i64(t1);
            tcg_temp_free_i64(t2);
        }
        break;

    case 0x0002: /* l.msb */
        LOG_DIS("l.msb r%d, r%d\n", ra, rb);
        {
            TCGv_i32 t0 = tcg_temp_new_i32();
            TCGv_i64 t1 = tcg_temp_new_i64();
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_mul_tl(t0, cpu_R[ra], cpu_R[rb]);
            tcg_gen_ext_i32_i64(t1, t0);
            tcg_gen_concat_i32_i64(t2, maclo, machi);
            tcg_gen_sub_i64(t2, t2, t1);
            tcg_gen_trunc_i64_i32(maclo, t2);
            tcg_gen_shri_i64(t2, t2, 32);
            tcg_gen_trunc_i64_i32(machi, t2);
            tcg_temp_free_i32(t0);
            tcg_temp_free_i64(t1);
            tcg_temp_free_i64(t2);
        }
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
}
static void dec_logic(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
    uint32_t rd, ra, L6;
    op0 = extract32(insn, 6, 2);
    rd = extract32(insn, 21, 5);
    ra = extract32(insn, 16, 5);
    L6 = extract32(insn, 0, 6);

    switch (op0) {
    case 0x00: /* l.slli */
        LOG_DIS("l.slli r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_shli_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
        break;

    case 0x01: /* l.srli */
        LOG_DIS("l.srli r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_shri_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
        break;

    case 0x02: /* l.srai */
        LOG_DIS("l.srai r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_sari_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
        break;

    case 0x03: /* l.rori */
        LOG_DIS("l.rori r%d, r%d, %d\n", rd, ra, L6);
        tcg_gen_rotri_tl(cpu_R[rd], cpu_R[ra], (L6 & 0x1f));
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
}
static void dec_M(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
    uint32_t rd;
    uint32_t K16;
    op0 = extract32(insn, 16, 1);
    rd = extract32(insn, 21, 5);
    K16 = extract32(insn, 0, 16);

    switch (op0) {
    case 0x0: /* l.movhi */
        LOG_DIS("l.movhi r%d, %d\n", rd, K16);
        tcg_gen_movi_tl(cpu_R[rd], (K16 << 16));
        break;

    case 0x1: /* l.macrc */
        LOG_DIS("l.macrc r%d\n", rd);
        tcg_gen_mov_tl(cpu_R[rd], maclo);
        tcg_gen_movi_tl(maclo, 0x0);
        tcg_gen_movi_tl(machi, 0x0);
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
}
static void dec_comp(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
    uint32_t ra, rb;

    op0 = extract32(insn, 21, 5);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);

    tcg_gen_movi_i32(env_btaken, 0x0);
    /* unsigned integers */
    tcg_gen_ext32u_tl(cpu_R[ra], cpu_R[ra]);
    tcg_gen_ext32u_tl(cpu_R[rb], cpu_R[rb]);

    switch (op0) {
    case 0x0: /* l.sfeq */
        LOG_DIS("l.sfeq r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_EQ, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x1: /* l.sfne */
        LOG_DIS("l.sfne r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_NE, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x2: /* l.sfgtu */
        LOG_DIS("l.sfgtu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GTU, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x3: /* l.sfgeu */
        LOG_DIS("l.sfgeu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GEU, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x4: /* l.sfltu */
        LOG_DIS("l.sfltu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LTU, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x5: /* l.sfleu */
        LOG_DIS("l.sfleu r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LEU, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0xa: /* l.sfgts */
        LOG_DIS("l.sfgts r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GT, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0xb: /* l.sfges */
        LOG_DIS("l.sfges r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_GE, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0xc: /* l.sflts */
        LOG_DIS("l.sflts r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LT, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    case 0xd: /* l.sfles */
        LOG_DIS("l.sfles r%d, r%d\n", ra, rb);
        tcg_gen_setcond_tl(TCG_COND_LE, env_btaken, cpu_R[ra], cpu_R[rb]);
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
    wb_SR_F();
}
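/*
 * Added note (commentary only): the set-flag instructions only compute
 * their result into env_btaken; wb_SR_F() then folds it into the SR_F bit
 * of cpu_sr, which is what a later l.bf/l.bnf tests in gen_jump().
 */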
static void dec_compi(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
    uint32_t ra, I16;

    op0 = extract32(insn, 21, 5);
    ra = extract32(insn, 16, 5);
    I16 = extract32(insn, 0, 16);

    tcg_gen_movi_i32(env_btaken, 0x0);
    I16 = sign_extend(I16, 16);

    switch (op0) {
    case 0x0: /* l.sfeqi */
        LOG_DIS("l.sfeqi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_EQ, env_btaken, cpu_R[ra], I16);
        break;

    case 0x1: /* l.sfnei */
        LOG_DIS("l.sfnei r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_NE, env_btaken, cpu_R[ra], I16);
        break;

    case 0x2: /* l.sfgtui */
        LOG_DIS("l.sfgtui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GTU, env_btaken, cpu_R[ra], I16);
        break;

    case 0x3: /* l.sfgeui */
        LOG_DIS("l.sfgeui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GEU, env_btaken, cpu_R[ra], I16);
        break;

    case 0x4: /* l.sfltui */
        LOG_DIS("l.sfltui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LTU, env_btaken, cpu_R[ra], I16);
        break;

    case 0x5: /* l.sfleui */
        LOG_DIS("l.sfleui r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LEU, env_btaken, cpu_R[ra], I16);
        break;

    case 0xa: /* l.sfgtsi */
        LOG_DIS("l.sfgtsi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GT, env_btaken, cpu_R[ra], I16);
        break;

    case 0xb: /* l.sfgesi */
        LOG_DIS("l.sfgesi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_GE, env_btaken, cpu_R[ra], I16);
        break;

    case 0xc: /* l.sfltsi */
        LOG_DIS("l.sfltsi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LT, env_btaken, cpu_R[ra], I16);
        break;

    case 0xd: /* l.sflesi */
        LOG_DIS("l.sflesi r%d, %d\n", ra, I16);
        tcg_gen_setcondi_tl(TCG_COND_LE, env_btaken, cpu_R[ra], I16);
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
    wb_SR_F();
}
static void dec_sys(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
#ifdef OPENRISC_DISAS
    uint32_t K16;
#endif
    op0 = extract32(insn, 16, 8);
#ifdef OPENRISC_DISAS
    K16 = extract32(insn, 0, 16);
#endif

    switch (op0) {
    case 0x000: /* l.sys */
        LOG_DIS("l.sys %d\n", K16);
        tcg_gen_movi_tl(cpu_pc, dc->pc);
        gen_exception(dc, EXCP_SYSCALL);
        dc->is_jmp = DISAS_UPDATE;
        break;

    case 0x100: /* l.trap */
        LOG_DIS("l.trap %d\n", K16);
#if defined(CONFIG_USER_ONLY)
        return;
#else
        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);
            return;
        }
        tcg_gen_movi_tl(cpu_pc, dc->pc);
        gen_exception(dc, EXCP_TRAP);
#endif
        break;

    case 0x300: /* l.csync */
        LOG_DIS("l.csync\n");
#if defined(CONFIG_USER_ONLY)
        return;
#else
        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);
            return;
        }
#endif
        break;

    case 0x200: /* l.msync */
        LOG_DIS("l.msync\n");
#if defined(CONFIG_USER_ONLY)
        return;
#else
        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);
            return;
        }
#endif
        break;

    case 0x270: /* l.psync */
        LOG_DIS("l.psync\n");
#if defined(CONFIG_USER_ONLY)
        return;
#else
        if (dc->mem_idx == MMU_USER_IDX) {
            gen_illegal_exception(dc);
            return;
        }
#endif
        break;

    default:
        gen_illegal_exception(dc);
        break;
    }
}
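/*
 * Added note (commentary only): in this translator l.csync, l.msync and
 * l.psync are no-ops beyond the user-mode check above; only l.sys and
 * l.trap actually raise an exception.
 */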
static void dec_float(DisasContext *dc, uint32_t insn)
{
    uint32_t op0;
    uint32_t ra, rb, rd;
    op0 = extract32(insn, 0, 8);
    ra = extract32(insn, 16, 5);
    rb = extract32(insn, 11, 5);
    rd = extract32(insn, 21, 5);

    switch (op0) {
    case 0x00: /* lf.add.s */
        LOG_DIS("lf.add.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_add_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x01: /* lf.sub.s */
        LOG_DIS("lf.sub.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_sub_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x02: /* lf.mul.s */
        LOG_DIS("lf.mul.s r%d, r%d, r%d\n", rd, ra, rb);
        if (ra != 0 && rb != 0) {
            gen_helper_float_mul_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        } else {
            tcg_gen_ori_tl(fpcsr, fpcsr, FPCSR_ZF);
            tcg_gen_movi_i32(cpu_R[rd], 0x0);
        }
        break;

    case 0x03: /* lf.div.s */
        LOG_DIS("lf.div.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_div_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x04: /* lf.itof.s */
        LOG_DIS("lf.itof r%d, r%d\n", rd, ra);
        gen_helper_itofs(cpu_R[rd], cpu_env, cpu_R[ra]);
        break;

    case 0x05: /* lf.ftoi.s */
        LOG_DIS("lf.ftoi r%d, r%d\n", rd, ra);
        gen_helper_ftois(cpu_R[rd], cpu_env, cpu_R[ra]);
        break;

    case 0x06: /* lf.rem.s */
        LOG_DIS("lf.rem.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_rem_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x07: /* lf.madd.s */
        LOG_DIS("lf.madd.s r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_muladd_s(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x08: /* lf.sfeq.s */
        LOG_DIS("lf.sfeq.s r%d, r%d\n", ra, rb);
        gen_helper_float_eq_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x09: /* lf.sfne.s */
        LOG_DIS("lf.sfne.s r%d, r%d\n", ra, rb);
        gen_helper_float_ne_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x0a: /* lf.sfgt.s */
        LOG_DIS("lf.sfgt.s r%d, r%d\n", ra, rb);
        gen_helper_float_gt_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x0b: /* lf.sfge.s */
        LOG_DIS("lf.sfge.s r%d, r%d\n", ra, rb);
        gen_helper_float_ge_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x0c: /* lf.sflt.s */
        LOG_DIS("lf.sflt.s r%d, r%d\n", ra, rb);
        gen_helper_float_lt_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
        break;

    case 0x0d: /* lf.sfle.s */
        LOG_DIS("lf.sfle.s r%d, r%d\n", ra, rb);
        gen_helper_float_le_s(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
        break;
    /* not used yet, open it when we need or64.  */
    /*#ifdef TARGET_OPENRISC64
        LOG_DIS("lf.add.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_add_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

        LOG_DIS("lf.sub.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_sub_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

        LOG_DIS("lf.mul.d r%d, r%d, r%d\n", rd, ra, rb);
        if (ra != 0 && rb != 0) {
            gen_helper_float_mul_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);
        } else {
            tcg_gen_ori_tl(fpcsr, fpcsr, FPCSR_ZF);
            tcg_gen_movi_i64(cpu_R[rd], 0x0);
        }

        LOG_DIS("lf.div.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_div_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x14:     lf.itof.d
        LOG_DIS("lf.itof r%d, r%d\n", rd, ra);
        gen_helper_itofd(cpu_R[rd], cpu_env, cpu_R[ra]);

    case 0x15:     lf.ftoi.d
        LOG_DIS("lf.ftoi r%d, r%d\n", rd, ra);
        gen_helper_ftoid(cpu_R[rd], cpu_env, cpu_R[ra]);

        LOG_DIS("lf.rem.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_rem_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x17:     lf.madd.d
        LOG_DIS("lf.madd.d r%d, r%d, r%d\n", rd, ra, rb);
        gen_helper_float_muladd_d(cpu_R[rd], cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x18:     lf.sfeq.d
        LOG_DIS("lf.sfeq.d r%d, r%d\n", ra, rb);
        gen_helper_float_eq_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1a:     lf.sfgt.d
        LOG_DIS("lf.sfgt.d r%d, r%d\n", ra, rb);
        gen_helper_float_gt_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1b:     lf.sfge.d
        LOG_DIS("lf.sfge.d r%d, r%d\n", ra, rb);
        gen_helper_float_ge_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x19:     lf.sfne.d
        LOG_DIS("lf.sfne.d r%d, r%d\n", ra, rb);
        gen_helper_float_ne_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1c:     lf.sflt.d
        LOG_DIS("lf.sflt.d r%d, r%d\n", ra, rb);
        gen_helper_float_lt_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);

    case 0x1d:     lf.sfle.d
        LOG_DIS("lf.sfle.d r%d, r%d\n", ra, rb);
        gen_helper_float_le_d(env_btaken, cpu_env, cpu_R[ra], cpu_R[rb]);
    #endif*/

    default:
        gen_illegal_exception(dc);
        break;
    }
    wb_SR_F();
}
static void disas_openrisc_insn(DisasContext *dc, OpenRISCCPU *cpu)
{
    uint32_t op0;
    uint32_t insn;
    insn = cpu_ldl_code(&cpu->env, dc->pc);
    op0 = extract32(insn, 26, 6);

    switch (op0) {
    case 0x2e:
        dec_logic(dc, insn);
        break;

    case 0x2f:
        dec_compi(dc, insn);
        break;

    case 0x32:
        dec_float(dc, insn);
        break;
    }
}
static void check_breakpoint(OpenRISCCPU *cpu, DisasContext *dc)
{
    CPUBreakpoint *bp;

    if (unlikely(!QTAILQ_EMPTY(&cpu->env.breakpoints))) {
        QTAILQ_FOREACH(bp, &cpu->env.breakpoints, entry) {
            if (bp->pc == dc->pc) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
                gen_exception(dc, EXCP_DEBUG);
                dc->is_jmp = DISAS_UPDATE;
            }
        }
    }
}
static inline void gen_intermediate_code_internal(OpenRISCCPU *cpu,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    struct DisasContext ctx, *dc = &ctx;
    uint16_t *gen_opc_end;
    uint32_t pc_start;
    int j, k;
    uint32_t next_page_start;
    int num_insns;
    int max_insns;

    qemu_log_try_set_file(stderr);

    pc_start = tb->pc;
    dc->tb = tb;

    gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
    dc->is_jmp = DISAS_NEXT;
    dc->ppc = pc_start;
    dc->pc = pc_start;
    dc->flags = cpu->env.cpucfgr;
    dc->mem_idx = cpu_mmu_index(&cpu->env);
    dc->synced_flags = dc->tb_flags = tb->flags;
    dc->delayed_branch = !!(dc->tb_flags & D_FLAG);
    dc->singlestep_enabled = cpu->env.singlestep_enabled;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("-----------------------------------------\n");
        log_cpu_state(&cpu->env, 0);
    }

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    k = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;

    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }

    gen_icount_start();

    do {
        check_breakpoint(cpu, dc);
        if (search_pc) {
            j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
            if (k < j) {
                k++;
                while (k < j) {
                    gen_opc_instr_start[k++] = 0;
                }
            }
            gen_opc_pc[k] = dc->pc;
            gen_opc_instr_start[k] = 1;
            gen_opc_icount[k] = num_insns;
        }

        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
            tcg_gen_debug_insn_start(dc->pc);
        }

        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }
        dc->ppc = dc->pc - 4;
        dc->npc = dc->pc + 4;
        tcg_gen_movi_tl(cpu_ppc, dc->ppc);
        tcg_gen_movi_tl(cpu_npc, dc->npc);
        disas_openrisc_insn(dc, cpu);
        dc->pc = dc->npc;
        num_insns++;
        /* delay slot */
        if (dc->delayed_branch) {
            dc->delayed_branch--;
            if (!dc->delayed_branch) {
                dc->tb_flags &= ~D_FLAG;
                gen_sync_flags(dc);
                tcg_gen_mov_tl(cpu_pc, jmp_pc);
                tcg_gen_mov_tl(cpu_npc, jmp_pc);
                tcg_gen_movi_tl(jmp_pc, 0);
                tcg_gen_exit_tb(0);
                dc->is_jmp = DISAS_JUMP;
            }
        }
    } while (!dc->is_jmp
             && tcg_ctx.gen_opc_ptr < gen_opc_end
             && !cpu->env.singlestep_enabled
             && (dc->pc < next_page_start)
             && num_insns < max_insns);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (dc->is_jmp == DISAS_NEXT) {
        dc->is_jmp = DISAS_UPDATE;
        tcg_gen_movi_tl(cpu_pc, dc->pc);
    }
    if (unlikely(cpu->env.singlestep_enabled)) {
        if (dc->is_jmp == DISAS_NEXT) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }
        gen_exception(dc, EXCP_DEBUG);
    } else {
        switch (dc->is_jmp) {
        case DISAS_NEXT:
            gen_goto_tb(dc, 0, dc->pc);
            break;
        default:
        case DISAS_JUMP:
            break;
        case DISAS_UPDATE:
            /* indicate that the hash table must be used
               to find the next TB */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* nothing more to generate */
            break;
        }
    }

    gen_icount_end(tb, num_insns);
    *tcg_ctx.gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
        k++;
        while (k <= j) {
            gen_opc_instr_start[k++] = 0;
        }
    } else {
        tb->size = dc->pc - pc_start;
        tb->icount = num_insns;
    }

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        log_target_disas(&cpu->env, pc_start, dc->pc - pc_start, 0);
        qemu_log("\nisize=%d osize=%td\n",
                 dc->pc - pc_start, tcg_ctx.gen_opc_ptr -
                 tcg_ctx.gen_opc_buf);
    }
}
void gen_intermediate_code(CPUOpenRISCState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(openrisc_env_get_cpu(env), tb, 0);
}

void gen_intermediate_code_pc(CPUOpenRISCState *env,
                              struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(openrisc_env_get_cpu(env), tb, 1);
}
void cpu_dump_state(CPUOpenRISCState *env, FILE *f,
                    fprintf_function cpu_fprintf,
                    int flags)
{
    int i;
    uint32_t *regs = env->gpr;
    cpu_fprintf(f, "PC=%08x\n", env->pc);
    for (i = 0; i < 32; ++i) {
        cpu_fprintf(f, "R%02d=%08x%c", i, regs[i],
                    (i % 4) == 3 ? '\n' : ' ');
    }
}
void restore_state_to_opc(CPUOpenRISCState *env, TranslationBlock *tb,
                          int pc_pos)
{
    env->pc = gen_opc_pc[pc_pos];
}