2 * RISC-V translation routines for the RV64Zfh Standard Extension.
4 * Copyright (c) 2020 Chih-Min Chao, chihmin.chao@sifive.com
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms and conditions of the GNU General Public License,
8 * version 2 or later, as published by the Free Software Foundation.
10 * This program is distributed in the hope it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
15 * You should have received a copy of the GNU General Public License along with
16 * this program. If not, see <http://www.gnu.org/licenses/>.
19 #define REQUIRE_ZFH(ctx) do { \
20 if (!ctx->cfg_ptr->ext_zfh) { \
25 #define REQUIRE_ZHINX_OR_ZFH(ctx) do { \
26 if (!ctx->cfg_ptr->ext_zhinx && !ctx->cfg_ptr->ext_zfh) { \
31 #define REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx) do { \
32 if (!ctx->cfg_ptr->ext_zfhmin && !ctx->cfg_ptr->ext_zfbfmin) { \
37 #define REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx) do { \
38 if (!(ctx->cfg_ptr->ext_zfhmin || ctx->cfg_ptr->ext_zhinxmin)) { \
/*
 * FLH: load a 16-bit halfword from memory at rs1+imm into fpr[rd],
 * NaN-boxing the upper bits (gen_nanbox_h) as required for values
 * narrower than FLEN.
 * NOTE(review): this excerpt appears elided — braces, the imm!=0 guard,
 * and the trailing mark_fs_dirty/return are not visible; verify against
 * the full source.
 */
static bool trans_flh(DisasContext *ctx, arg_flh *a)
    /* Valid with either Zfhmin or Zfbfmin. */
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    /* Base address from rs1; EXT_NONE — raw value for address math. */
    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    TCGv temp = tcg_temp_new();
    tcg_gen_addi_tl(temp, t0, a->imm);

    dest = cpu_fpr[a->rd];
    /* 16-bit target-endian unsigned load into the low half of the FPR. */
    tcg_gen_qemu_ld_i64(dest, t0, ctx->mem_idx, MO_TEUW);
    gen_nanbox_h(dest, dest);
/*
 * FSH: store the low 16 bits of fpr[rs2] to memory at rs1+imm.
 * NOTE(review): excerpt appears elided (imm guard / return not visible).
 */
static bool trans_fsh(DisasContext *ctx, arg_fsh *a)
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    /* Compute the effective address rs1 + imm. */
    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
    TCGv temp = tcg_temp_new();
    tcg_gen_addi_tl(temp, t0, a->imm);

    /* Only the low 16 bits are stored (MO_TEUW). */
    tcg_gen_qemu_st_i64(cpu_fpr[a->rs2], t0, ctx->mem_idx, MO_TEUW);
/*
 * FMADD.H: fused multiply-add, dest = (rs1 * rs2) + rs3, half precision.
 * Works for both Zfh (real FPRs) and Zhinx (halves held in GPRs) via the
 * get_fpr_hs/gen_set_fpr_hs accessors.
 */
static bool trans_fmadd_h(DisasContext *ctx, arg_fmadd_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    /* Apply the instruction's rounding-mode field before the operation. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fmadd_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FMSUB.H: fused multiply-subtract, dest = (rs1 * rs2) - rs3 per the
 * RISC-V spec (semantics implemented in gen_helper_fmsub_h).
 */
static bool trans_fmsub_h(DisasContext *ctx, arg_fmsub_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    /* Rounding mode from the instruction encoding. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fmsub_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FNMSUB.H: negated fused multiply-subtract, dest = -(rs1 * rs2) + rs3
 * per the RISC-V spec (semantics in gen_helper_fnmsub_h).
 */
static bool trans_fnmsub_h(DisasContext *ctx, arg_fnmsub_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmsub_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FNMADD.H: negated fused multiply-add, dest = -(rs1 * rs2) - rs3
 * per the RISC-V spec (semantics in gen_helper_fnmadd_h).
 */
static bool trans_fnmadd_h(DisasContext *ctx, arg_fnmadd_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);

    gen_set_rm(ctx, a->rm);
    gen_helper_fnmadd_h(dest, tcg_env, src1, src2, src3);
    gen_set_fpr_hs(ctx, a->rd, dest);
/* FADD.H: half-precision addition, dest = rs1 + rs2. */
static bool trans_fadd_h(DisasContext *ctx, arg_fadd_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    /* Set dynamic/static rounding mode, then defer to the FP helper. */
    gen_set_rm(ctx, a->rm);
    gen_helper_fadd_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
/* FSUB.H: half-precision subtraction, dest = rs1 - rs2. */
static bool trans_fsub_h(DisasContext *ctx, arg_fsub_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsub_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
/* FMUL.H: half-precision multiplication, dest = rs1 * rs2. */
static bool trans_fmul_h(DisasContext *ctx, arg_fmul_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fmul_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
/* FDIV.H: half-precision division, dest = rs1 / rs2. */
static bool trans_fdiv_h(DisasContext *ctx, arg_fdiv_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_set_rm(ctx, a->rm);
    gen_helper_fdiv_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
/* FSQRT.H: half-precision square root of rs1 (single source operand). */
static bool trans_fsqrt_h(DisasContext *ctx, arg_fsqrt_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fsqrt_h(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FSGNJ.H: result takes the sign bit (bit 15) from rs2 and the remaining
 * bits from rs1.  rs1 == rs2 encodes FMV.H (plain register move).
 * NOTE(review): excerpt appears elided — the `} else {` / closing-brace
 * lines separating the branches are not visible; verify against full source.
 */
static bool trans_fsgnj_h(DisasContext *ctx, arg_fsgnj_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    if (a->rs1 == a->rs2) { /* FMOV */
        if (!ctx->cfg_ptr->ext_zfinx) {
            /* Real FPRs: re-establish canonical NaN-boxing on the copy. */
            gen_check_nanbox_h(dest, src1);
            /* Zfinx path: halves are kept sign-extended in GPRs instead. */
            tcg_gen_ext16s_i64(dest, src1);
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

        if (!ctx->cfg_ptr->ext_zfinx) {
            TCGv_i64 rs1 = tcg_temp_new_i64();
            TCGv_i64 rs2 = tcg_temp_new_i64();
            gen_check_nanbox_h(rs1, src1);
            gen_check_nanbox_h(rs2, src2);

            /* This formulation retains the nanboxing of rs2 in normal 'Zfh'. */
            /* dest = rs2 with bits [14:0] replaced by rs1's magnitude. */
            tcg_gen_deposit_i64(dest, rs2, rs1, 0, 15);
            tcg_gen_deposit_i64(dest, src2, src1, 0, 15);
            tcg_gen_ext16s_i64(dest, dest);

    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FSGNJN.H: result takes the INVERTED sign bit of rs2 and the remaining
 * bits from rs1.  rs1 == rs2 encodes FNEG.H.
 * NOTE(review): excerpt appears elided — `} else {` / closing-brace lines
 * between the branches are not visible; verify against full source.
 */
static bool trans_fsgnjn_h(DisasContext *ctx, arg_fsgnjn_h *a)
    TCGv_i64 rs1, rs2, mask;

    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        /* Real FPRs: normalise NaN-boxing before operating on the bits. */
        gen_check_nanbox_h(rs1, src1);
        tcg_gen_mov_i64(rs1, src1);

    if (a->rs1 == a->rs2) { /* FNEG */
        /* Flip bit 15 (the half-precision sign bit). */
        tcg_gen_xori_i64(dest, rs1, MAKE_64BIT_MASK(15, 1));
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
            tcg_gen_mov_i64(rs2, src2);

        /*
         * Replace bit 15 in rs1 with inverse in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        mask = tcg_constant_i64(~MAKE_64BIT_MASK(15, 1));
        tcg_gen_not_i64(rs2, rs2);
        tcg_gen_andc_i64(rs2, rs2, mask);   /* rs2 = ~rs2 & bit15-mask */
        tcg_gen_and_i64(dest, mask, rs1);   /* dest = rs1 without its sign */
        tcg_gen_or_i64(dest, dest, rs2);    /* merge the inverted sign in */

    /* signed-extended instead of nanboxing for result if enable zfinx */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
/*
 * FSGNJX.H: result's sign bit is sign(rs1) XOR sign(rs2); remaining bits
 * come from rs1.  rs1 == rs2 encodes FABS.H.
 * NOTE(review): excerpt appears elided — `} else {` / closing-brace lines
 * between the branches are not visible; verify against full source.
 */
static bool trans_fsgnjx_h(DisasContext *ctx, arg_fsgnjx_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        gen_check_nanbox_h(rs1, src1);
        tcg_gen_mov_i64(rs1, src1);

    if (a->rs1 == a->rs2) { /* FABS */
        /* Clear bit 15, the half-precision sign bit. */
        tcg_gen_andi_i64(dest, rs1, ~MAKE_64BIT_MASK(15, 1));
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
            tcg_gen_mov_i64(rs2, src2);

        /*
         * Xor bit 15 in rs1 with that in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        tcg_gen_andi_i64(dest, rs2, MAKE_64BIT_MASK(15, 1));
        tcg_gen_xor_i64(dest, rs1, dest);

    /* signed-extended instead of nanboxing for result if enable zfinx */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
/*
 * FMIN.H: minimum of rs1 and rs2 (NaN handling delegated to the helper).
 * No gen_set_rm: min/max are not affected by the rounding mode.
 */
static bool trans_fmin_h(DisasContext *ctx, arg_fmin_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmin_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FMAX.H: maximum of rs1 and rs2 (NaN handling delegated to the helper).
 * No gen_set_rm: min/max are not affected by the rounding mode.
 */
static bool trans_fmax_h(DisasContext *ctx, arg_fmax_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fmax_h(dest, tcg_env, src1, src2);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FCVT.S.H: widen half precision to single precision.
 * Allowed by the Zfhmin/Zhinxmin subsets (conversion-only extensions).
 */
static bool trans_fcvt_s_h(DisasContext *ctx, arg_fcvt_s_h *a)
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_s_h(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FCVT.D.H: widen half precision to double precision.
 * Requires D (or Zdinx) in addition to the half-precision subset;
 * the result is written with the double-precision setter.
 */
static bool trans_fcvt_d_h(DisasContext *ctx, arg_fcvt_d_h *a)
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_d_h(dest, tcg_env, src1);
    gen_set_fpr_d(ctx, a->rd, dest);
/* FCVT.H.S: narrow single precision to half precision (rounds per rm). */
static bool trans_fcvt_h_s(DisasContext *ctx, arg_fcvt_h_s *a)
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_s(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FCVT.H.D: narrow double precision to half precision.
 * Source is read with the double-precision getter; requires D or Zdinx.
 */
static bool trans_fcvt_h_d(DisasContext *ctx, arg_fcvt_h_d *a)
    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
    REQUIRE_ZDINX_OR_D(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_d(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_d(dest, tcg_env, src1);
    gen_set_fpr_hs(ctx, a->rd, dest);
/* FEQ.H: quiet equality compare; writes 1/0 to GPR rd (via helper). */
static bool trans_feq_h(DisasContext *ctx, arg_feq_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_feq_h(dest, tcg_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
/* FLT.H: signaling less-than compare; writes 1/0 to GPR rd (via helper). */
static bool trans_flt_h(DisasContext *ctx, arg_flt_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_flt_h(dest, tcg_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
/* FLE.H: signaling less-or-equal compare; writes 1/0 to GPR rd. */
static bool trans_fle_h(DisasContext *ctx, arg_fle_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

    gen_helper_fle_h(dest, tcg_env, src1, src2);
    gen_set_gpr(ctx, a->rd, dest);
/* FCLASS.H: classify rs1; the helper writes the 10-bit class mask to rd. */
static bool trans_fclass_h(DisasContext *ctx, arg_fclass_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_helper_fclass_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
/* FCVT.W.H: convert half-precision rs1 to a signed 32-bit integer in rd. */
static bool trans_fcvt_w_h(DisasContext *ctx, arg_fcvt_w_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_w_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
/* FCVT.WU.H: convert half-precision rs1 to an unsigned 32-bit integer. */
static bool trans_fcvt_wu_h(DisasContext *ctx, arg_fcvt_wu_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_wu_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
/* FCVT.H.W: convert a signed 32-bit integer in rs1 to half precision. */
static bool trans_fcvt_h_w(DisasContext *ctx, arg_fcvt_h_w *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    /* EXT_SIGN: take the sign-extended 32-bit source value. */
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_w(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FCVT.H.WU: convert an unsigned 32-bit integer in rs1 to half precision.
 * EXT_SIGN here is harmless only if the helper truncates to 32 bits —
 * NOTE(review): presumably gen_helper_fcvt_h_wu masks to uint32_t; verify.
 */
static bool trans_fcvt_h_wu(DisasContext *ctx, arg_fcvt_h_wu *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_wu(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FMV.X.H: copy the raw half-precision bit pattern from fpr[rs1] to GPR
 * rd, sign-extended from bit 15 to XLEN.
 * NOTE(review): the #else / #endif of the TARGET_RISCV64 conditional are
 * not visible in this excerpt; verify against full source.
 */
static bool trans_fmv_x_h(DisasContext *ctx, arg_fmv_x_h *a)
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);

#if defined(TARGET_RISCV64)
    /* 16 bits -> 64 bits */
    tcg_gen_ext16s_tl(dest, cpu_fpr[a->rs1]);
    /* 16 bits -> 32 bits */
    tcg_gen_extrl_i64_i32(dest, cpu_fpr[a->rs1]);
    tcg_gen_ext16s_tl(dest, dest);

    gen_set_gpr(ctx, a->rd, dest);
/*
 * FMV.H.X: copy the low 16 bits of GPR rs1 into fpr[rd], then NaN-box
 * the upper bits so the FPR holds a canonical half-precision value.
 */
static bool trans_fmv_h_x(DisasContext *ctx, arg_fmv_h_x *a)
    REQUIRE_ZFHMIN_OR_ZFBFMIN(ctx);

    /* EXT_ZERO: only the raw (zero-extended) bit pattern is wanted. */
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_ZERO);

    tcg_gen_extu_tl_i64(cpu_fpr[a->rd], t0);
    gen_nanbox_h(cpu_fpr[a->rd], cpu_fpr[a->rd]);
/*
 * FCVT.L.H: convert half-precision rs1 to a signed 64-bit integer.
 * NOTE(review): the RV64-only guard (REQUIRE_64BIT) expected before the
 * extension check is not visible in this excerpt; verify.
 */
static bool trans_fcvt_l_h(DisasContext *ctx, arg_fcvt_l_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_l_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
/*
 * FCVT.LU.H: convert half-precision rs1 to an unsigned 64-bit integer.
 * NOTE(review): RV64-only guard not visible in this excerpt; verify.
 */
static bool trans_fcvt_lu_h(DisasContext *ctx, arg_fcvt_lu_h *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_lu_h(dest, tcg_env, src1);
    gen_set_gpr(ctx, a->rd, dest);
/*
 * FCVT.H.L: convert a signed 64-bit integer in rs1 to half precision.
 * NOTE(review): RV64-only guard not visible in this excerpt; verify.
 */
static bool trans_fcvt_h_l(DisasContext *ctx, arg_fcvt_h_l *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    /* EXT_SIGN is a no-op at XLEN=64; full source value is used. */
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_l(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);
/*
 * FCVT.H.LU: convert an unsigned 64-bit integer in rs1 to half precision.
 * NOTE(review): RV64-only guard not visible in this excerpt; verify.
 */
static bool trans_fcvt_h_lu(DisasContext *ctx, arg_fcvt_h_lu *a)
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);

    gen_set_rm(ctx, a->rm);
    gen_helper_fcvt_h_lu(dest, tcg_env, t0);
    gen_set_fpr_hs(ctx, a->rd, dest);