/* Internal functions.
   Copyright (C) 2011-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "memmodel.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "dojump.h"
#include "expr.h"
#include "asan.h"
#include "ubsan.h"
#include "recog.h"
#include "builtins.h"
#include "optabs-tree.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
  internal_fn_fnspec_array[IFN_LAST] = 0;
}

/* Create static initializers for the information returned by
   direct_internal_fn.  */
#define not_direct { -2, -2, false }
#define mask_load_direct { -1, 2, false }
#define load_lanes_direct { -1, -1, false }
#define mask_store_direct { 3, 2, false }
#define store_lanes_direct { 0, 0, false }
#define unary_direct { 0, 0, true }
#define binary_direct { 0, 0, true }

const direct_internal_fn_info direct_internal_fn_array[IFN_LAST + 1] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) not_direct,
#define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) TYPE##_direct,
#include "internal-fn.def"
  not_direct
};

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB, or CODE_FOR_nothing if none.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  return convert_optab_handler (optab, imode, vmode);
}

/* Expand LOAD_LANES call STMT using optab OPTAB.  */

static void
expand_load_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, optab), 2, ops);
}

/* Expand STORE_LANES call STMT using optab OPTAB.  */

static void
expand_store_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, optab), 2, ops);
}

static void
expand_ANNOTATE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in omp_device_lower pass.  */

static void
expand_GOMP_USE_SIMT (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in omp_device_lower pass.  */

static void
expand_GOMP_SIMT_ENTER (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Allocate per-lane storage and begin non-uniform execution region.  */

static void
expand_GOMP_SIMT_ENTER_ALLOC (internal_fn, gcall *stmt)
{
  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = gen_reg_rtx (Pmode);
  rtx size = expand_normal (gimple_call_arg (stmt, 0));
  rtx align = expand_normal (gimple_call_arg (stmt, 1));
  struct expand_operand ops[3];
  create_output_operand (&ops[0], target, Pmode);
  create_input_operand (&ops[1], size, Pmode);
  create_input_operand (&ops[2], align, Pmode);
  gcc_assert (targetm.have_omp_simt_enter ());
  expand_insn (targetm.code_for_omp_simt_enter, 3, ops);
}

/* Deallocate per-lane storage and leave non-uniform execution region.  */

static void
expand_GOMP_SIMT_EXIT (internal_fn, gcall *stmt)
{
  gcc_checking_assert (!gimple_call_lhs (stmt));
  rtx arg = expand_normal (gimple_call_arg (stmt, 0));
  struct expand_operand ops[1];
  create_input_operand (&ops[0], arg, Pmode);
  gcc_assert (targetm.have_omp_simt_exit ());
  expand_insn (targetm.code_for_omp_simt_exit, 1, ops);
}

/* Lane index on SIMT targets: thread index in the warp on NVPTX.  On targets
   without SIMT execution this should be expanded in omp_device_lower pass.  */

static void
expand_GOMP_SIMT_LANE (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (targetm.have_omp_simt_lane ());
  emit_insn (targetm.gen_omp_simt_lane (target));
}

/* This should get expanded in omp_device_lower pass.  */

static void
expand_GOMP_SIMT_VF (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* Lane index of the first SIMT lane that supplies a non-zero argument.
   This is a SIMT counterpart to GOMP_SIMD_LAST_LANE, used to represent the
   lane that executed the last iteration for handling OpenMP lastprivate.  */

static void
expand_GOMP_SIMT_LAST_LANE (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx cond = expand_normal (gimple_call_arg (stmt, 0));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  struct expand_operand ops[2];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], cond, mode);
  gcc_assert (targetm.have_omp_simt_last_lane ());
  expand_insn (targetm.code_for_omp_simt_last_lane, 2, ops);
}

/* Non-transparent predicate used in SIMT lowering of OpenMP "ordered".  */

static void
expand_GOMP_SIMT_ORDERED_PRED (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx ctr = expand_normal (gimple_call_arg (stmt, 0));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  struct expand_operand ops[2];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], ctr, mode);
  gcc_assert (targetm.have_omp_simt_ordered ());
  expand_insn (targetm.code_for_omp_simt_ordered, 2, ops);
}

275 /* "Or" boolean reduction across SIMT lanes: return non-zero in all lanes if
276 any lane supplies a non-zero argument. */
278 static void
279 expand_GOMP_SIMT_VOTE_ANY (internal_fn, gcall *stmt)
281 tree lhs = gimple_call_lhs (stmt);
282 if (!lhs)
283 return;
285 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
286 rtx cond = expand_normal (gimple_call_arg (stmt, 0));
287 machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
288 struct expand_operand ops[2];
289 create_output_operand (&ops[0], target, mode);
290 create_input_operand (&ops[1], cond, mode);
291 gcc_assert (targetm.have_omp_simt_vote_any ());
292 expand_insn (targetm.code_for_omp_simt_vote_any, 2, ops);
/* Exchange between SIMT lanes with a "butterfly" pattern: source lane index
   is destination lane index XOR given offset.  */

static void
expand_GOMP_SIMT_XCHG_BFLY (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx src = expand_normal (gimple_call_arg (stmt, 0));
  rtx idx = expand_normal (gimple_call_arg (stmt, 1));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  struct expand_operand ops[3];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], src, mode);
  create_input_operand (&ops[2], idx, SImode);
  gcc_assert (targetm.have_omp_simt_xchg_bfly ());
  expand_insn (targetm.code_for_omp_simt_xchg_bfly, 3, ops);
}

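/* Illustrative note: with offset 1 the lanes exchange pairwise 0 <-> 1,
   2 <-> 3, and so on; with offset 2 they exchange 0 <-> 2, 1 <-> 3, ...,
   since the source lane index is the destination lane index XOR the
   offset.  */
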
/* Exchange between SIMT lanes according to given source lane index.  */

static void
expand_GOMP_SIMT_XCHG_IDX (internal_fn, gcall *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (!lhs)
    return;

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  rtx src = expand_normal (gimple_call_arg (stmt, 0));
  rtx idx = expand_normal (gimple_call_arg (stmt, 1));
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
  struct expand_operand ops[3];
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], src, mode);
  create_input_operand (&ops[2], idx, SImode);
  gcc_assert (targetm.have_omp_simt_xchg_idx ());
  expand_insn (targetm.code_for_omp_simt_xchg_idx, 3, ops);
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_ORDERED_START (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_ORDERED_END (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_VPTR (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_PTR (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_MARK (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_POISON (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_POISON_USE (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the tsan pass.  */

static void
expand_TSAN_FUNC_EXIT (internal_fn, gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the lower pass.  */

static void
expand_FALLTHROUGH (internal_fn, gcall *call)
{
  error_at (gimple_location (call),
	    "invalid use of attribute %<fallthrough%>");
}

/* Return minimum precision needed to represent all values
   of ARG in SIGNed integral type.  */

static int
get_min_precision (tree arg, signop sign)
{
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  signop orig_sign = sign;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      int p;
      if (TYPE_SIGN (TREE_TYPE (arg)) != sign)
	{
	  widest_int w = wi::to_widest (arg);
	  w = wi::ext (w, prec, sign);
	  p = wi::min_precision (w, sign);
	}
      else
	p = wi::min_precision (arg, sign);
      return MIN (p, prec);
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
	    sign = UNSIGNED;
	  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
	    return prec + (orig_sign != sign);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	}
      if (++cnt > 30)
	return prec + (orig_sign != sign);
    }
  if (TREE_CODE (arg) != SSA_NAME)
    return prec + (orig_sign != sign);
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      arg = t;
	      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
		    sign = UNSIGNED;
		  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
		    return prec + (orig_sign != sign);
		  prec = TYPE_PRECISION (TREE_TYPE (arg));
		}
	      if (++cnt > 30)
		return prec + (orig_sign != sign);
	      continue;
	    }
	}
      return prec + (orig_sign != sign);
    }
  if (sign == TYPE_SIGN (TREE_TYPE (arg)))
    {
      int p1 = wi::min_precision (arg_min, sign);
      int p2 = wi::min_precision (arg_max, sign);
      p1 = MAX (p1, p2);
      prec = MIN (prec, p1);
    }
  else if (sign == UNSIGNED && !wi::neg_p (arg_min, SIGNED))
    {
      int p = wi::min_precision (arg_max, UNSIGNED);
      prec = MIN (prec, p);
    }
  return prec + (orig_sign != sign);
}

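/* Illustrative example: for an INTEGER_CST 100 in a 32-bit type,
   get_min_precision (arg, UNSIGNED) returns 7, as 100 fits in 7 bits;
   for an SSA_NAME with recorded value range [0, 1000] it returns 10.
   The (orig_sign != sign) term adds one extra bit whenever the
   requested signedness had to be flipped along the way.  */
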
/* Helper for expand_*_overflow.  Set the __imag__ part to true
   (1 except for signed:1 type, in which case store -1).  */

static void
expand_arith_set_overflow (tree lhs, rtx target)
{
  if (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (lhs))) == 1
      && !TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs))))
    write_complex_part (target, constm1_rtx, true);
  else
    write_complex_part (target, const1_rtx, true);
}

/* Helper for expand_*_overflow.  Store RES into the __real__ part
   of TARGET.  If RES has larger MODE than __real__ part of TARGET,
   set the __imag__ part to 1 if RES doesn't fit into it.  Similarly
   if LHS has smaller precision than its mode.  */

static void
expand_arith_overflow_result_store (tree lhs, rtx target,
				    machine_mode mode, rtx res)
{
  machine_mode tgtmode = GET_MODE_INNER (GET_MODE (target));
  rtx lres = res;
  if (tgtmode != mode)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      lres = convert_modes (tgtmode, mode, res, uns);
      gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
      do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
			       EQ, true, mode, NULL_RTX, NULL, done_label,
			       profile_probability::very_likely ());
      expand_arith_set_overflow (lhs, target);
      emit_label (done_label);
    }
  int prec = TYPE_PRECISION (TREE_TYPE (TREE_TYPE (lhs)));
  int tgtprec = GET_MODE_PRECISION (tgtmode);
  if (prec < tgtprec)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      res = lres;
      if (uns)
	{
	  rtx mask
	    = immed_wide_int_const (wi::shifted_mask (0, prec, false, tgtprec),
				    tgtmode);
	  lres = expand_simple_binop (tgtmode, AND, res, mask, NULL_RTX,
				      true, OPTAB_LIB_WIDEN);
	}
      else
	{
	  lres = expand_shift (LSHIFT_EXPR, tgtmode, res, tgtprec - prec,
			       NULL_RTX, 1);
	  lres = expand_shift (RSHIFT_EXPR, tgtmode, lres, tgtprec - prec,
			       NULL_RTX, 0);
	}
      do_compare_rtx_and_jump (res, lres,
			       EQ, true, tgtmode, NULL_RTX, NULL, done_label,
			       profile_probability::very_likely ());
      expand_arith_set_overflow (lhs, target);
      emit_label (done_label);
    }
  write_complex_part (target, lres, false);
}

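/* Illustrative example (assuming a hypothetical 24-bit precision type
   stored in a 32-bit tgtmode): the unsigned path masks RES with 0xffffff,
   the signed path shifts left then arithmetically right by 8 bits to
   sign-extend; either way, if the result differs from the original RES,
   bits were lost and the __imag__ overflow flag is set before the
   truncated value is stored into the __real__ part.  */
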
/* Helper for expand_*_overflow.  Store RES into TARGET.  */

static void
expand_ubsan_result_store (rtx target, rtx res)
{
  if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    convert_move (SUBREG_REG (target), res, SUBREG_PROMOTED_SIGN (target));
  else
    emit_move_insn (target, res);
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
			tree arg0, tree arg1, bool unsr_p, bool uns0_p,
			bool uns1_p, bool is_ubsan, tree *datap)
{
  rtx res, target = NULL_RTX;
  tree fn;
  rtx_code_label *done_label = gen_label_rtx ();
  rtx_code_label *do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  int prec = GET_MODE_PRECISION (mode);
  rtx sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
  bool do_xor = false;

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.

     s1 + s2 -> sr
	res = (S) ((U) s1 + (U) s2)
	ovf = s2 < 0 ? res > s1 : res < s1 (or jump on overflow)
     s1 - s2 -> sr
	res = (S) ((U) s1 - (U) s2)
	ovf = s2 < 0 ? res < s1 : res > s1 (or jump on overflow)
     u1 + u2 -> ur
	res = u1 + u2
	ovf = res < u1 (or jump on carry, but RTL opts will handle it)
     u1 - u2 -> ur
	res = u1 - u2
	ovf = res > u1 (or jump on carry, but RTL opts will handle it)
     s1 + u2 -> sr
	res = (S) ((U) s1 + u2)
	ovf = ((U) res ^ sgn) < u2
     s1 + u2 -> ur
	t1 = (S) (u2 ^ sgn)
	t2 = s1 + t1
	res = (U) t2 ^ sgn
	ovf = t1 < 0 ? t2 > s1 : t2 < s1 (or jump on overflow)
     s1 - u2 -> sr
	res = (S) ((U) s1 - u2)
	ovf = u2 > ((U) s1 ^ sgn)
     s1 - u2 -> ur
	res = (U) s1 - u2
	ovf = s1 < 0 || u2 > (U) s1
     u1 - s2 -> sr
	res = u1 - (U) s2
	ovf = u1 >= ((U) s2 ^ sgn)
     u1 - s2 -> ur
	t1 = u1 ^ sgn
	t2 = t1 - (U) s2
	res = t2 ^ sgn
	ovf = s2 < 0 ? (S) t2 < (S) t1 : (S) t2 > (S) t1 (or jump on overflow)
     s1 + s2 -> ur
	res = (U) s1 + (U) s2
	ovf = s2 < 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)
     u1 + u2 -> sr
	res = (S) (u1 + u2)
	ovf = (U) res < u2 || res < 0
     u1 - u2 -> sr
	res = (S) (u1 - u2)
	ovf = u1 >= u2 ? res < 0 : res >= 0
     s1 - s2 -> ur
	res = (U) s1 - (U) s2
	ovf = s2 >= 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)  */

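  /* Worked illustration of the u1 - u2 -> sr rule above with 8-bit
     precision: u1 = 200, u2 = 10 gives res = (S) 190 = -66; u1 >= u2
     and res < 0, so overflow is reported (190 is not representable in
     [-128, 127]).  With u1 = 10, u2 = 20, res wraps to (S) 246 = -10;
     u1 < u2 and res < 0, so no overflow is reported, matching the
     exact value -10.  */
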
  if (code == PLUS_EXPR && uns0_p && !uns1_p)
    {
      /* PLUS_EXPR is commutative, if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      std::swap (op0, op1);
      std::swap (arg0, arg1);
      uns0_p = false;
      uns1_p = true;
    }

  /* u1 +- u2 -> ur */
  if (uns0_p && uns1_p && unsr_p)
    {
      insn_code icode = optab_handler (code == PLUS_EXPR ? uaddv4_optab
				       : usubv4_optab, mode);
      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[4];
	  rtx_insn *last = get_last_insn ();

	  res = gen_reg_rtx (mode);
	  create_output_operand (&ops[0], res, mode);
	  create_input_operand (&ops[1], op0, mode);
	  create_input_operand (&ops[2], op1, mode);
	  create_fixed_operand (&ops[3], do_error);
	  if (maybe_expand_insn (icode, 4, ops))
	    {
	      last = get_last_insn ();
	      if (profile_status_for_fn (cfun) != PROFILE_ABSENT
		  && JUMP_P (last)
		  && any_condjump_p (last)
		  && !find_reg_note (last, REG_BR_PROB, 0))
		add_reg_br_prob_note (last,
				      profile_probability::very_unlikely ());
	      emit_jump (done_label);
	      goto do_error_label;
	    }

	  delete_insns_since (last);
	}

      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = op0;
      /* For PLUS_EXPR, the operation is commutative, so we can pick
	 operand to compare against.  For prec <= BITS_PER_WORD, I think
	 preferring REG operand is better over CONST_INT, because
	 the CONST_INT might enlarge the instruction or CSE would need
	 to figure out we'd already loaded it into a register before.
	 For prec > BITS_PER_WORD, I think CONST_INT might be more beneficial,
	 as then the multi-word comparison can be perhaps simplified.  */
      if (code == PLUS_EXPR
	  && (prec <= BITS_PER_WORD
	      ? (CONST_SCALAR_INT_P (op0) && REG_P (op1))
	      : CONST_SCALAR_INT_P (op1)))
	tem = op1;
      do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
			       true, mode, NULL_RTX, NULL, done_label,
			       profile_probability::very_likely ());
      goto do_error_label;
    }

  /* s1 +- u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab,
			      code == PLUS_EXPR ? res : op0, sgn,
			      NULL_RTX, false, OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL,
			       done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* s1 + u2 -> ur */
  if (code == PLUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      op1 = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op1, we have to avoid using the value range
	 for the original argument.  */
      arg1 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* u1 - s2 -> ur */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && unsr_p)
    {
      op0 = expand_binop (mode, add_optab, op0, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op0, we have to avoid using the value range
	 for the original argument.  */
      arg0 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* s1 - u2 -> ur */
  if (code == MINUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg0);
      if (pos_neg == 2)
	/* If ARG0 is known to be always negative, this is always overflow.  */
	emit_jump (do_error);
      else if (pos_neg == 3)
	/* If ARG0 is not known to be always positive, check at runtime.  */
	do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL, do_error, profile_probability::very_unlikely ());
      do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL,
			       done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* u1 - s2 -> sr */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL,
			       done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* u1 + u2 -> sr */
  if (code == PLUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL, do_error, profile_probability::very_unlikely ());
      rtx tem = op1;
      /* The operation is commutative, so we can pick operand to compare
	 against.  For prec <= BITS_PER_WORD, I think preferring REG operand
	 is better over CONST_INT, because the CONST_INT might enlarge the
	 instruction or CSE would need to figure out we'd already loaded it
	 into a register before.  For prec > BITS_PER_WORD, I think CONST_INT
	 might be more beneficial, as then the multi-word comparison can be
	 perhaps simplified.  */
      if (prec <= BITS_PER_WORD
	  ? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
	  : CONST_SCALAR_INT_P (op0))
	tem = op0;
      do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL,
			       done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  /* s1 +- s2 -> ur */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg1);
      if (code == PLUS_EXPR)
	{
	  int pos_neg0 = get_range_pos_neg (arg0);
	  if (pos_neg0 != 3 && pos_neg == 3)
	    {
	      std::swap (op0, op1);
	      pos_neg = pos_neg0;
	    }
	}
      rtx tem;
      if (pos_neg != 3)
	{
	  tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
				    ? and_optab : ior_optab,
			      op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL,
				   NULL, done_label, profile_probability::very_likely ());
	}
      else
	{
	  rtx_code_label *do_ior_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op1, const0_rtx,
				   code == MINUS_EXPR ? GE : LT, false, mode,
				   NULL_RTX, NULL, do_ior_label,
				   profile_probability::even ());
	  tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, done_label, profile_probability::very_likely ());
	  emit_jump (do_error);
	  emit_label (do_ior_label);
	  tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, done_label, profile_probability::very_likely ());
	}
      goto do_error_label;
    }

  /* u1 - u2 -> sr */
  if (code == MINUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx_code_label *op0_geu_op1 = gen_label_rtx ();
      do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL,
			       op0_geu_op1, profile_probability::even ());
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL, done_label, profile_probability::very_likely ());
      emit_jump (do_error);
      emit_label (op0_geu_op1);
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL, done_label, profile_probability::very_likely ());
      goto do_error_label;
    }

  gcc_assert (!uns0_p && !uns1_p && !unsr_p);

  /* s1 +- s2 -> sr */
 do_signed:
  {
    insn_code icode = optab_handler (code == PLUS_EXPR ? addv4_optab
				     : subv4_optab, mode);
    if (icode != CODE_FOR_nothing)
      {
	struct expand_operand ops[4];
	rtx_insn *last = get_last_insn ();

	res = gen_reg_rtx (mode);
	create_output_operand (&ops[0], res, mode);
	create_input_operand (&ops[1], op0, mode);
	create_input_operand (&ops[2], op1, mode);
	create_fixed_operand (&ops[3], do_error);
	if (maybe_expand_insn (icode, 4, ops))
	  {
	    last = get_last_insn ();
	    if (profile_status_for_fn (cfun) != PROFILE_ABSENT
		&& JUMP_P (last)
		&& any_condjump_p (last)
		&& !find_reg_note (last, REG_BR_PROB, 0))
	      add_reg_br_prob_note (last,
				    profile_probability::very_unlikely ());
	    emit_jump (done_label);
	    goto do_error_label;
	  }

	delete_insns_since (last);
      }

    /* Compute the operation.  On RTL level, the addition is always
       unsigned.  */
    res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

    /* If we can prove that one of the arguments (for MINUS_EXPR only
       the second operand, as subtraction is not commutative) is always
       non-negative or always negative, we can do just one comparison
       and conditional jump.  */
    int pos_neg = get_range_pos_neg (arg1);
    if (code == PLUS_EXPR)
      {
	int pos_neg0 = get_range_pos_neg (arg0);
	if (pos_neg0 != 3 && pos_neg == 3)
	  {
	    std::swap (op0, op1);
	    pos_neg = pos_neg0;
	  }
      }

    /* Addition overflows if and only if the two operands have the same sign,
       and the result has the opposite sign.  Subtraction overflows if and
       only if the two operands have opposite sign, and the subtrahend has
       the same sign as the result.  Here 0 is counted as positive.  */
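    /* For instance, with 8-bit signed operands op0 = op1 = 100, res wraps
       to -56: op0 ^ op1 has the sign bit clear while res ^ op1 has it set,
       so (res ^ op1) & ~(op0 ^ op1) is negative and the overflow path is
       taken.  */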
    if (pos_neg == 3)
      {
	/* Compute op0 ^ op1 (operands have opposite sign).  */
	rtx op_xor = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
				   OPTAB_LIB_WIDEN);

	/* Compute res ^ op1 (result and 2nd operand have opposite sign).  */
	rtx res_xor = expand_binop (mode, xor_optab, res, op1, NULL_RTX, false,
				    OPTAB_LIB_WIDEN);

	rtx tem;
	if (code == PLUS_EXPR)
	  {
	    /* Compute (res ^ op1) & ~(op0 ^ op1).  */
	    tem = expand_unop (mode, one_cmpl_optab, op_xor, NULL_RTX, false);
	    tem = expand_binop (mode, and_optab, res_xor, tem, NULL_RTX, false,
				OPTAB_LIB_WIDEN);
	  }
	else
	  {
	    /* Compute (op0 ^ op1) & ~(res ^ op1).  */
	    tem = expand_unop (mode, one_cmpl_optab, res_xor, NULL_RTX, false);
	    tem = expand_binop (mode, and_optab, op_xor, tem, NULL_RTX, false,
				OPTAB_LIB_WIDEN);
	  }

	/* No overflow if the result has bit sign cleared.  */
	do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				 NULL, done_label, profile_probability::very_likely ());
      }

    /* Compare the result of the operation with the first operand.
       No overflow for addition if second operand is positive and result
       is larger or second operand is negative and result is smaller.
       Likewise for subtraction with sign of second operand flipped.  */
    else
      do_compare_rtx_and_jump (res, op0,
			       (pos_neg == 1) ^ (code == MINUS_EXPR) ? GE : LE,
			       false, mode, NULL_RTX, NULL, done_label,
			       profile_probability::very_likely ());
  }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
					 arg0, arg1, datap);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    expand_arith_set_overflow (lhs, target);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	{
	  if (do_xor)
	    res = expand_binop (mode, add_optab, res, sgn, NULL_RTX, false,
				OPTAB_LIB_WIDEN);

	  expand_arith_overflow_result_store (lhs, target, mode, res);
	}
    }
}

/* Add negate overflow checking to the statement STMT.  */

static void
expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan,
		     tree *datap)
{
  rtx res, op1;
  tree fn;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_reg_br_prob_note (last,
				  profile_probability::very_unlikely ());
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL,
			       done_label, profile_probability::very_likely ());
    }

  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
					 arg1, NULL_TREE, datap);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    expand_arith_set_overflow (lhs, target);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

/* Add mul overflow checking to the statement STMT.  */

static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
		     bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan,
		     tree *datap)
{
  rtx res, op0, op1;
  tree fn, type;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;
  signop sign;
  enum insn_code icode;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  bool uns = unsr_p;
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.
     main_ovf (false) stands for jump on signed multiplication
     overflow or the main algorithm with uns == false.
     main_ovf (true) stands for jump on unsigned multiplication
     overflow or the main algorithm with uns == true.

     s1 * s2 -> sr
	res = (S) ((U) s1 * (U) s2)
	ovf = main_ovf (false)
     u1 * u2 -> ur
	res = u1 * u2
	ovf = main_ovf (true)
     s1 * u2 -> ur
	res = (U) s1 * u2
	ovf = (s1 < 0 && u2) || main_ovf (true)
     u1 * u2 -> sr
	res = (S) (u1 * u2)
	ovf = res < 0 || main_ovf (true)
     s1 * u2 -> sr
	res = (S) ((U) s1 * u2)
	ovf = (S) u2 >= 0 ? main_ovf (false)
			  : (s1 != 0 && (s1 != -1 || u2 != (U) res))
     s1 * s2 -> ur
	t1 = (s1 & s2) < 0 ? (-(U) s1) : ((U) s1)
	t2 = (s1 & s2) < 0 ? (-(U) s2) : ((U) s2)
	res = t1 * t2
	ovf = (s1 ^ s2) < 0 ? (s1 && s2) : main_ovf (true)  */

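  /* Worked illustration of the u1 * u2 -> sr rule above with 8-bit
     precision: u1 = 16, u2 = 8 gives u1 * u2 = 128 and res = (S) 128
     = -128 < 0, so overflow is reported (128 exceeds the signed maximum
     127) even though the unsigned multiplication itself did not wrap.  */
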
  if (uns0_p && !uns1_p)
    {
      /* Multiplication is commutative, if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      std::swap (op0, op1);
      std::swap (arg0, arg1);
      uns0_p = false;
      uns1_p = true;
    }

  int pos_neg0 = get_range_pos_neg (arg0);
  int pos_neg1 = get_range_pos_neg (arg1);

  /* s1 * u2 -> ur */
  if (!uns0_p && uns1_p && unsr_p)
    {
      switch (pos_neg0)
	{
	case 1:
	  /* If s1 is non-negative, just perform normal u1 * u2 -> ur.  */
	  goto do_main;
	case 2:
	  /* If s1 is negative, avoid the main code, just multiply and
	     signal overflow if op1 is not 0.  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, done_label, profile_probability::very_likely ());
	  goto do_error_label;
	case 3:
	  rtx_code_label *do_main_label;
	  do_main_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, do_main_label, profile_probability::very_likely ());
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, do_main_label, profile_probability::very_likely ());
	  expand_arith_set_overflow (lhs, target);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* u1 * u2 -> sr */
  if (uns0_p && uns1_p && !unsr_p)
    {
      uns = true;
      /* Rest of handling of this case after res is computed.  */
      goto do_main;
    }

  /* s1 * u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      switch (pos_neg1)
	{
	case 1:
	  goto do_main;
	case 2:
	  /* If (S) u2 is negative (i.e. u2 is larger than maximum of S),
	     avoid the main code, just multiply and signal overflow
	     unless 0 * u2 or -1 * ((U) Smin).  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, done_label, profile_probability::very_likely ());
	  do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
				   NULL, do_error, profile_probability::very_unlikely ());
	  int prec;
	  prec = GET_MODE_PRECISION (mode);
	  rtx sgn;
	  sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
	  do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
				   NULL, done_label, profile_probability::very_likely ());
	  goto do_error_label;
	case 3:
	  /* Rest of handling of this case after res is computed.  */
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* s1 * s2 -> ur */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      rtx tem, tem2;
      switch (pos_neg0 | pos_neg1)
	{
	case 1: /* Both operands known to be non-negative.  */
	  goto do_main;
	case 2: /* Both operands known to be negative.  */
	  op0 = expand_unop (mode, neg_optab, op0, NULL_RTX, false);
	  op1 = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
	  /* Avoid looking at arg0/arg1 ranges, as we've changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  goto do_main;
	case 3:
	  if ((pos_neg0 ^ pos_neg1) == 3)
	    {
	      /* If one operand is known to be negative and the other
		 non-negative, this overflows always, unless the non-negative
		 one is 0.  Just do normal multiply and set overflow
		 unless one of the operands is 0.  */
	      struct separate_ops ops;
	      ops.code = MULT_EXPR;
	      ops.type
		= build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
						  1);
	      ops.op0 = make_tree (ops.type, op0);
	      ops.op1 = make_tree (ops.type, op1);
	      ops.op2 = NULL_TREE;
	      ops.location = loc;
	      res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	      tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
				  OPTAB_LIB_WIDEN);
	      do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode,
				       NULL_RTX, NULL, done_label,
				       profile_probability::very_likely ());
	      goto do_error_label;
	    }
	  /* The general case, do all the needed comparisons at runtime.  */
	  rtx_code_label *do_main_label, *after_negate_label;
	  rtx rop0, rop1;
	  rop0 = gen_reg_rtx (mode);
	  rop1 = gen_reg_rtx (mode);
	  emit_move_insn (rop0, op0);
	  emit_move_insn (rop1, op1);
	  op0 = rop0;
	  op1 = rop1;
	  do_main_label = gen_label_rtx ();
	  after_negate_label = gen_label_rtx ();
	  tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, after_negate_label, profile_probability::very_likely ());
	  /* Both arguments negative here, negate them and continue with
	     normal unsigned overflow checking multiplication.  */
	  emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
					    NULL_RTX, false));
	  emit_move_insn (op1, expand_unop (mode, neg_optab, op1,
					    NULL_RTX, false));
	  /* Avoid looking at arg0/arg1 ranges, as we might have changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  emit_jump (do_main_label);
	  emit_label (after_negate_label);
	  tem2 = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
			       OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem2, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, do_main_label, profile_probability::very_likely ());
	  /* One argument is negative here, the other positive.  This
	     overflows always, unless one of the arguments is 0.  But
	     if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
	     is, thus we can keep do_main code oring in overflow as is.  */
	  do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, do_main_label, profile_probability::very_likely ());
	  expand_arith_set_overflow (lhs, target);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

 do_main:
  type = build_nonstandard_integer_type (GET_MODE_PRECISION (mode), uns);
  sign = uns ? UNSIGNED : SIGNED;
  icode = optab_handler (uns ? umulv4_optab : mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_reg_br_prob_note (last,
				  profile_probability::very_unlikely ());
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      int prec = GET_MODE_PRECISION (mode);
      machine_mode hmode = mode_for_size (prec / 2, MODE_INT, 1);
      ops.op0 = make_tree (type, op0);
      ops.op1 = make_tree (type, op1);
      ops.op2 = NULL_TREE;
      ops.location = loc;
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), uns);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res, prec,
				     NULL_RTX, uns);
	  hipart = convert_modes (mode, wmode, hipart, uns);
	  res = convert_modes (mode, wmode, res, uns);
	  if (uns)
	    /* For the unsigned multiplication, there was overflow if
	       HIPART is non-zero.  */
	    do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
				     NULL_RTX, NULL, done_label,
				     profile_probability::very_likely ());
	  else
	    {
	      rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
					  NULL_RTX, 0);
	      /* RES is low half of the double width result, HIPART
		 the high half.  There was overflow if
		 HIPART is different from RES < 0 ? -1 : 0.  */
	      do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
				       NULL_RTX, NULL, done_label,
				       profile_probability::very_likely ());
	    }
	}
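      /* For instance, a signed 16-bit multiply checked via a 32-bit
	 WIDEN_MULT_EXPR: 300 * 200 = 60000, HIPART = 60000 >> 16 = 0,
	 while the sign bit of the low half (S) 60000 = -5536 replicates
	 to -1; the two differ, so overflow is detected.  */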
      else if (hmode != BLKmode && 2 * GET_MODE_PRECISION (hmode) == prec)
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = uns ? NULL : gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, uns);
	  hipart0 = convert_modes (hmode, mode, hipart0, uns);
	  rtx lopart0 = convert_modes (hmode, mode, op0, uns);
	  rtx signbit0 = const0_rtx;
	  if (!uns)
	    signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				     NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, uns);
	  hipart1 = convert_modes (hmode, mode, hipart1, uns);
	  rtx lopart1 = convert_modes (hmode, mode, op1, uns);
	  rtx signbit1 = const0_rtx;
	  if (!uns)
	    signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				     NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (pos_neg0 == 1)
	    op0_sign = 0;
	  else if (pos_neg0 == 2)
	    op0_sign = -1;
	  if (pos_neg1 == 1)
	    op1_sign = 0;
	  else if (pos_neg1 == 2)
	    op1_sign = -1;

	  unsigned int mprec0 = prec;
	  if (arg0 != error_mark_node)
	    mprec0 = get_min_precision (arg0, sign);
	  if (mprec0 <= hprec)
	    op0_small_p = true;
	  else if (!uns && mprec0 <= hprec + 1)
	    op0_medium_p = true;
	  unsigned int mprec1 = prec;
	  if (arg1 != error_mark_node)
	    mprec1 = get_min_precision (arg1, sign);
	  if (mprec1 <= hprec)
	    op1_small_p = true;
	  else if (!uns && mprec1 <= hprec + 1)
	    op1_medium_p = true;

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
				     NULL_RTX, NULL, large_op0,
				     profile_probability::unlikely ());

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL, small_op0_large_op1,
				     profile_probability::unlikely ());

	  /* If both op0 and op1 are sign (!uns) or zero (uns) extended from
	     hmode to mode, the multiplication will never overflow.  We can
	     do just one hmode x hmode => mode widening multiplication.  */
	  rtx lopart0s = lopart0, lopart1s = lopart1;
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      lopart0s = shallow_copy_rtx (lopart0);
	      SUBREG_PROMOTED_VAR_P (lopart0s) = 1;
	      SUBREG_PROMOTED_SET (lopart0s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      lopart1s = shallow_copy_rtx (lopart1);
	      SUBREG_PROMOTED_VAR_P (lopart1s) = 1;
	      SUBREG_PROMOTED_SET (lopart1s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, uns);
	  ops.op0 = make_tree (halfstype, lopart0s);
	  ops.op1 = make_tree (halfstype, lopart1s);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = type;
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op1 is not, just swap the arguments and handle it as op1
	     sign/zero extended, op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL, both_ops_large,
				     profile_probability::unlikely ());

	  /* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op0 is not, prepare larger, hipart and lopart pseudos and
	     handle it together with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  if (!uns)
	    {
	      /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	      if (larger_sign == 0)
		emit_jump (after_hipart_neg);
	      else if (larger_sign != -1)
		do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL, after_hipart_neg,
					 profile_probability::even ());

	      tem = convert_modes (mode, hmode, lopart, 1);
	      tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	      tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_hipart_neg);

	      /* if (lopart < 0) loxhi -= larger;  */
	      if (smaller_sign == 0)
		emit_jump (after_lopart_neg);
	      else if (smaller_sign != -1)
		do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL, after_lopart_neg,
					 profile_probability::even ());

	      tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_lopart_neg);
	    }

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1)) (if !uns)
	     if (loxhi >> (bitsize / 2) == 0 (if uns).  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = convert_modes (hmode, mode, hipartloxhi, 0);
	  rtx signbitloxhi = const0_rtx;
	  if (!uns)
	    signbitloxhi = expand_shift (RSHIFT_EXPR, hmode,
					 convert_modes (hmode, mode,
							loxhi, 0),
					 hprec - 1, NULL_RTX, 0);

	  do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
				   NULL_RTX, NULL, do_overflow,
				   profile_probability::very_unlikely ());

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode,
			       convert_modes (hmode, mode, lo0xlo1, 1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);
1734 emit_label (both_ops_large);
1736 /* If both operands are large (not sign (!uns) or zero (uns)
1737 extended from hmode), then perform the full multiplication
1738 which will be the result of the operation.
1739 The only cases which don't overflow are for signed multiplication
1740 some cases where both hipart0 and highpart1 are 0 or -1.
1741 For unsigned multiplication when high parts are both non-zero
1742 this overflows always. */
1743 ops.code = MULT_EXPR;
1744 ops.op0 = make_tree (type, op0);
1745 ops.op1 = make_tree (type, op1);
1746 tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
1747 emit_move_insn (res, tem);
1749 if (!uns)
1751 if (!op0_medium_p)
1753 tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
1754 NULL_RTX, 1, OPTAB_DIRECT);
1755 do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
1756 NULL_RTX, NULL, do_error,
1757 profile_probability::very_unlikely ());
1760 if (!op1_medium_p)
1762 tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
1763 NULL_RTX, 1, OPTAB_DIRECT);
1764 do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
1765 NULL_RTX, NULL, do_error,
1766 profile_probability::very_unlikely ());
1769 /* At this point hipart{0,1} are both in [-1, 0]. If they are
1770 the same, overflow happened if res is negative, if they are
1771 different, overflow happened if res is positive. */
1772 if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
1773 emit_jump (hipart_different);
1774 else if (op0_sign == 1 || op1_sign == 1)
1775 do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
1776 NULL_RTX, NULL, hipart_different,
1777 profile_probability::even ());
1779 do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode,
1780 NULL_RTX, NULL, do_error,
1781 profile_probability::very_unlikely ());
1782 emit_jump (done_label);
1784 emit_label (hipart_different);
1786 do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
1787 NULL_RTX, NULL, do_error,
1788 profile_probability::very_unlikely ());
1789 emit_jump (done_label);
1792 emit_label (do_overflow);
1794 /* Overflow: do the full multiplication and fall through into do_error. */
1795 ops.op0 = make_tree (type, op0);
1796 ops.op1 = make_tree (type, op1);
1797 tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
1798 emit_move_insn (res, tem);
1800 else
1802 gcc_assert (!is_ubsan);
1803 ops.code = MULT_EXPR;
1804 ops.type = type;
1805 res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
1806 emit_jump (done_label);
1810 do_error_label:
1811 emit_label (do_error);
1812 if (is_ubsan)
1814 /* Expand the ubsan builtin call. */
1815 push_temp_slots ();
1816 fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
1817 arg0, arg1, datap);
1818 expand_normal (fn);
1819 pop_temp_slots ();
1820 do_pending_stack_adjust ();
1822 else if (lhs)
1823 expand_arith_set_overflow (lhs, target);
1825 /* We're done. */
1826 emit_label (done_label);
1828 /* u1 * u2 -> sr */
1829 if (uns0_p && uns1_p && !unsr_p)
1831 rtx_code_label *all_done_label = gen_label_rtx ();
1832 do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
1833 NULL, all_done_label, profile_probability::very_likely ());
1834 expand_arith_set_overflow (lhs, target);
1835 emit_label (all_done_label);
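/* An editorial worked example for the check above (hypothetical
   values, not from the original source): for 16-bit operands,
   300u * 200u gives res = 60000, which is negative when viewed as a
   signed 16-bit value, so the GE comparison falls through and the
   overflow flag is set; 60000 indeed does not fit in the signed
   result range [-32768, 32767].  */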
1838 /* s1 * u2 -> sr */
1839 if (!uns0_p && uns1_p && !unsr_p && pos_neg1 == 3)
1841 rtx_code_label *all_done_label = gen_label_rtx ();
1842 rtx_code_label *set_noovf = gen_label_rtx ();
1843 do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
1844 NULL, all_done_label, profile_probability::very_likely ());
1845 expand_arith_set_overflow (lhs, target);
1846 do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
1847 NULL, set_noovf, profile_probability::very_likely ());
1848 do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
1849 NULL, all_done_label, profile_probability::very_unlikely ());
1850 do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL,
1851 all_done_label, profile_probability::very_unlikely ());
1852 emit_label (set_noovf);
1853 write_complex_part (target, const0_rtx, true);
1854 emit_label (all_done_label);
1857 if (lhs)
1859 if (is_ubsan)
1860 expand_ubsan_result_store (target, res);
1861 else
1862 expand_arith_overflow_result_store (lhs, target, mode, res);
1866 /* Expand a UBSAN_CHECK_* internal function whose operands are vectors. */
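/* An illustrative note (editorial): for a V8SI operation the subparts
   count is 8, so the expander emits a runtime loop indexed by cntvar
   that checks one element per iteration; for V4SI and narrower vectors
   the per-element checks are instead emitted inline, unrolled cnt
   times.  */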
1868 static void
1869 expand_vector_ubsan_overflow (location_t loc, enum tree_code code, tree lhs,
1870 tree arg0, tree arg1)
1872 int cnt = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1873 rtx_code_label *loop_lab = NULL;
1874 rtx cntvar = NULL_RTX;
1875 tree cntv = NULL_TREE;
1876 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
1877 tree sz = TYPE_SIZE (eltype);
1878 tree data = NULL_TREE;
1879 tree resv = NULL_TREE;
1880 rtx lhsr = NULL_RTX;
1881 rtx resvr = NULL_RTX;
1883 if (lhs)
1885 optab op;
1886 lhsr = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1887 if (!VECTOR_MODE_P (GET_MODE (lhsr))
1888 || (op = optab_for_tree_code (code, TREE_TYPE (arg0),
1889 optab_default)) == unknown_optab
1890 || (optab_handler (op, TYPE_MODE (TREE_TYPE (arg0)))
1891 == CODE_FOR_nothing))
1893 if (MEM_P (lhsr))
1894 resv = make_tree (TREE_TYPE (lhs), lhsr);
1895 else
1897 resvr = assign_temp (TREE_TYPE (lhs), 1, 1);
1898 resv = make_tree (TREE_TYPE (lhs), resvr);
1902 if (cnt > 4)
1904 do_pending_stack_adjust ();
1905 loop_lab = gen_label_rtx ();
1906 cntvar = gen_reg_rtx (TYPE_MODE (sizetype));
1907 cntv = make_tree (sizetype, cntvar);
1908 emit_move_insn (cntvar, const0_rtx);
1909 emit_label (loop_lab);
1911 if (TREE_CODE (arg0) != VECTOR_CST)
1913 rtx arg0r = expand_normal (arg0);
1914 arg0 = make_tree (TREE_TYPE (arg0), arg0r);
1916 if (TREE_CODE (arg1) != VECTOR_CST)
1918 rtx arg1r = expand_normal (arg1);
1919 arg1 = make_tree (TREE_TYPE (arg1), arg1r);
1921 for (int i = 0; i < (cnt > 4 ? 1 : cnt); i++)
1923 tree op0, op1, res = NULL_TREE;
1924 if (cnt > 4)
1926 tree atype = build_array_type_nelts (eltype, cnt);
1927 op0 = uniform_vector_p (arg0);
1928 if (op0 == NULL_TREE)
1930 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg0);
1931 op0 = build4_loc (loc, ARRAY_REF, eltype, op0, cntv,
1932 NULL_TREE, NULL_TREE);
1934 op1 = uniform_vector_p (arg1);
1935 if (op1 == NULL_TREE)
1937 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg1);
1938 op1 = build4_loc (loc, ARRAY_REF, eltype, op1, cntv,
1939 NULL_TREE, NULL_TREE);
1941 if (resv)
1943 res = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, resv);
1944 res = build4_loc (loc, ARRAY_REF, eltype, res, cntv,
1945 NULL_TREE, NULL_TREE);
1948 else
1950 tree bitpos = bitsize_int (tree_to_uhwi (sz) * i);
1951 op0 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg0, sz, bitpos);
1952 op1 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg1, sz, bitpos);
1953 if (resv)
1954 res = fold_build3_loc (loc, BIT_FIELD_REF, eltype, resv, sz,
1955 bitpos);
1957 switch (code)
1959 case PLUS_EXPR:
1960 expand_addsub_overflow (loc, PLUS_EXPR, res, op0, op1,
1961 false, false, false, true, &data);
1962 break;
1963 case MINUS_EXPR:
1964 if (cnt > 4 ? integer_zerop (arg0) : integer_zerop (op0))
1965 expand_neg_overflow (loc, res, op1, true, &data);
1966 else
1967 expand_addsub_overflow (loc, MINUS_EXPR, res, op0, op1,
1968 false, false, false, true, &data);
1969 break;
1970 case MULT_EXPR:
1971 expand_mul_overflow (loc, res, op0, op1, false, false, false,
1972 true, &data);
1973 break;
1974 default:
1975 gcc_unreachable ();
1978 if (cnt > 4)
1980 struct separate_ops ops;
1981 ops.code = PLUS_EXPR;
1982 ops.type = TREE_TYPE (cntv);
1983 ops.op0 = cntv;
1984 ops.op1 = build_int_cst (TREE_TYPE (cntv), 1);
1985 ops.op2 = NULL_TREE;
1986 ops.location = loc;
1987 rtx ret = expand_expr_real_2 (&ops, cntvar, TYPE_MODE (sizetype),
1988 EXPAND_NORMAL);
1989 if (ret != cntvar)
1990 emit_move_insn (cntvar, ret);
1991 do_compare_rtx_and_jump (cntvar, GEN_INT (cnt), NE, false,
1992 TYPE_MODE (sizetype), NULL_RTX, NULL, loop_lab,
1993 profile_probability::very_likely ());
1995 if (lhs && resv == NULL_TREE)
1997 struct separate_ops ops;
1998 ops.code = code;
1999 ops.type = TREE_TYPE (arg0);
2000 ops.op0 = arg0;
2001 ops.op1 = arg1;
2002 ops.op2 = NULL_TREE;
2003 ops.location = loc;
2004 rtx ret = expand_expr_real_2 (&ops, lhsr, TYPE_MODE (TREE_TYPE (arg0)),
2005 EXPAND_NORMAL);
2006 if (ret != lhsr)
2007 emit_move_insn (lhsr, ret);
2009 else if (resvr)
2010 emit_move_insn (lhsr, resvr);
2013 /* Expand UBSAN_CHECK_ADD call STMT. */
2015 static void
2016 expand_UBSAN_CHECK_ADD (internal_fn, gcall *stmt)
2018 location_t loc = gimple_location (stmt);
2019 tree lhs = gimple_call_lhs (stmt);
2020 tree arg0 = gimple_call_arg (stmt, 0);
2021 tree arg1 = gimple_call_arg (stmt, 1);
2022 if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
2023 expand_vector_ubsan_overflow (loc, PLUS_EXPR, lhs, arg0, arg1);
2024 else
2025 expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
2026 false, false, false, true, NULL);
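/* As an editorial illustration with hypothetical SSA names, a
   -fsanitize=signed-integer-overflow compilation replaces a signed
   addition with a GIMPLE call such as
     x_3 = UBSAN_CHECK_ADD (a_1, b_2);
   which the expander above turns into the addition plus a branch to
   the ubsan runtime on overflow.  */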
2029 /* Expand UBSAN_CHECK_SUB call STMT. */
2031 static void
2032 expand_UBSAN_CHECK_SUB (internal_fn, gcall *stmt)
2034 location_t loc = gimple_location (stmt);
2035 tree lhs = gimple_call_lhs (stmt);
2036 tree arg0 = gimple_call_arg (stmt, 0);
2037 tree arg1 = gimple_call_arg (stmt, 1);
2038 if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
2039 expand_vector_ubsan_overflow (loc, MINUS_EXPR, lhs, arg0, arg1);
2040 else if (integer_zerop (arg0))
2041 expand_neg_overflow (loc, lhs, arg1, true, NULL);
2042 else
2043 expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
2044 false, false, false, true, NULL);
2047 /* Expand UBSAN_CHECK_MUL call STMT. */
2049 static void
2050 expand_UBSAN_CHECK_MUL (internal_fn, gcall *stmt)
2052 location_t loc = gimple_location (stmt);
2053 tree lhs = gimple_call_lhs (stmt);
2054 tree arg0 = gimple_call_arg (stmt, 0);
2055 tree arg1 = gimple_call_arg (stmt, 1);
2056 if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
2057 expand_vector_ubsan_overflow (loc, MULT_EXPR, lhs, arg0, arg1);
2058 else
2059 expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true,
2060 NULL);
2063 /* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion. */
2065 static void
2066 expand_arith_overflow (enum tree_code code, gimple *stmt)
2068 tree lhs = gimple_call_lhs (stmt);
2069 if (lhs == NULL_TREE)
2070 return;
2071 tree arg0 = gimple_call_arg (stmt, 0);
2072 tree arg1 = gimple_call_arg (stmt, 1);
2073 tree type = TREE_TYPE (TREE_TYPE (lhs));
2074 int uns0_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
2075 int uns1_p = TYPE_UNSIGNED (TREE_TYPE (arg1));
2076 int unsr_p = TYPE_UNSIGNED (type);
2077 int prec0 = TYPE_PRECISION (TREE_TYPE (arg0));
2078 int prec1 = TYPE_PRECISION (TREE_TYPE (arg1));
2079 int precres = TYPE_PRECISION (type);
2080 location_t loc = gimple_location (stmt);
2081 if (!uns0_p && get_range_pos_neg (arg0) == 1)
2082 uns0_p = true;
2083 if (!uns1_p && get_range_pos_neg (arg1) == 1)
2084 uns1_p = true;
2085 int pr = get_min_precision (arg0, uns0_p ? UNSIGNED : SIGNED);
2086 prec0 = MIN (prec0, pr);
2087 pr = get_min_precision (arg1, uns1_p ? UNSIGNED : SIGNED);
2088 prec1 = MIN (prec1, pr);
2090 /* If uns0_p && uns1_p, precop is the minimum precision of an
2091 unsigned type needed to hold the exact result; otherwise
2092 precop is the minimum precision of a signed type needed to
2093 hold the exact result. */
2094 int precop;
2095 if (code == MULT_EXPR)
2096 precop = prec0 + prec1 + (uns0_p != uns1_p);
2097 else
2099 if (uns0_p == uns1_p)
2100 precop = MAX (prec0, prec1) + 1;
2101 else if (uns0_p)
2102 precop = MAX (prec0 + 1, prec1) + 1;
2103 else
2104 precop = MAX (prec0, prec1 + 1) + 1;
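/* Editorial examples of the computation above (not from the original
   source): adding two signed 16-bit values needs
   precop = MAX (16, 16) + 1 = 17, while multiplying a signed 16-bit
   value by an unsigned 16-bit value needs
   precop = 16 + 16 + 1 = 33.  */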
2106 int orig_precres = precres;
2110 if ((uns0_p && uns1_p)
2111 ? ((precop + !unsr_p) <= precres
2112 /* u1 - u2 -> ur can overflow, no matter what precision
2113 the result has. */
2114 && (code != MINUS_EXPR || !unsr_p))
2115 : (!unsr_p && precop <= precres))
2117 /* The infinite precision result will always fit into the result. */
2118 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2119 write_complex_part (target, const0_rtx, true);
2120 machine_mode mode = TYPE_MODE (type);
2121 struct separate_ops ops;
2122 ops.code = code;
2123 ops.type = type;
2124 ops.op0 = fold_convert_loc (loc, type, arg0);
2125 ops.op1 = fold_convert_loc (loc, type, arg1);
2126 ops.op2 = NULL_TREE;
2127 ops.location = loc;
2128 rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
2129 expand_arith_overflow_result_store (lhs, target, mode, tem);
2130 return;
2133 /* For operations with low precision, if the target doesn't support
2134 them, start with precres widening right away; otherwise widen only
2135 if the simplest cases can't be used. */
2136 const int min_precision = targetm.min_arithmetic_precision ();
2137 if (orig_precres == precres && precres < min_precision)
2139 else if ((uns0_p && uns1_p && unsr_p && prec0 <= precres
2140 && prec1 <= precres)
2141 || ((!uns0_p || !uns1_p) && !unsr_p
2142 && prec0 + uns0_p <= precres
2143 && prec1 + uns1_p <= precres))
2145 arg0 = fold_convert_loc (loc, type, arg0);
2146 arg1 = fold_convert_loc (loc, type, arg1);
2147 switch (code)
2149 case MINUS_EXPR:
2150 if (integer_zerop (arg0) && !unsr_p)
2152 expand_neg_overflow (loc, lhs, arg1, false, NULL);
2153 return;
2155 /* FALLTHRU */
2156 case PLUS_EXPR:
2157 expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
2158 unsr_p, unsr_p, false, NULL);
2159 return;
2160 case MULT_EXPR:
2161 expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
2162 unsr_p, unsr_p, false, NULL);
2163 return;
2164 default:
2165 gcc_unreachable ();
2169 /* For sub-word operations, retry with a wider type first. */
2170 if (orig_precres == precres && precop <= BITS_PER_WORD)
2172 int p = MAX (min_precision, precop);
2173 machine_mode m = smallest_mode_for_size (p, MODE_INT);
2174 tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
2175 uns0_p && uns1_p
2176 && unsr_p);
2177 p = TYPE_PRECISION (optype);
2178 if (p > precres)
2180 precres = p;
2181 unsr_p = TYPE_UNSIGNED (optype);
2182 type = optype;
2183 continue;
2187 if (prec0 <= precres && prec1 <= precres)
2189 tree types[2];
2190 if (unsr_p)
2192 types[0] = build_nonstandard_integer_type (precres, 0);
2193 types[1] = type;
2195 else
2197 types[0] = type;
2198 types[1] = build_nonstandard_integer_type (precres, 1);
2200 arg0 = fold_convert_loc (loc, types[uns0_p], arg0);
2201 arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
2202 if (code != MULT_EXPR)
2203 expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
2204 uns0_p, uns1_p, false, NULL);
2205 else
2206 expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
2207 uns0_p, uns1_p, false, NULL);
2208 return;
2211 /* Retry with a wider type. */
2212 if (orig_precres == precres)
2214 int p = MAX (prec0, prec1);
2215 machine_mode m = smallest_mode_for_size (p, MODE_INT);
2216 tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
2217 uns0_p && uns1_p
2218 && unsr_p);
2219 p = TYPE_PRECISION (optype);
2220 if (p > precres)
2222 precres = p;
2223 unsr_p = TYPE_UNSIGNED (optype);
2224 type = optype;
2225 continue;
2229 gcc_unreachable ();
2231 while (1);
2234 /* Expand ADD_OVERFLOW STMT. */
2236 static void
2237 expand_ADD_OVERFLOW (internal_fn, gcall *stmt)
2239 expand_arith_overflow (PLUS_EXPR, stmt);
2242 /* Expand SUB_OVERFLOW STMT. */
2244 static void
2245 expand_SUB_OVERFLOW (internal_fn, gcall *stmt)
2247 expand_arith_overflow (MINUS_EXPR, stmt);
2250 /* Expand MUL_OVERFLOW STMT. */
2252 static void
2253 expand_MUL_OVERFLOW (internal_fn, gcall *stmt)
2255 expand_arith_overflow (MULT_EXPR, stmt);
2258 /* This should get folded in tree-vectorizer.c. */
2260 static void
2261 expand_LOOP_VECTORIZED (internal_fn, gcall *)
2263 gcc_unreachable ();
2266 /* This should get folded in tree-vectorizer.c. */
2268 static void
2269 expand_LOOP_DIST_ALIAS (internal_fn, gcall *)
2271 gcc_unreachable ();
2274 /* Expand MASK_LOAD call STMT using optab OPTAB. */
2276 static void
2277 expand_mask_load_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
2279 struct expand_operand ops[3];
2280 tree type, lhs, rhs, maskt, ptr;
2281 rtx mem, target, mask;
2282 unsigned align;
2284 maskt = gimple_call_arg (stmt, 2);
2285 lhs = gimple_call_lhs (stmt);
2286 if (lhs == NULL_TREE)
2287 return;
2288 type = TREE_TYPE (lhs);
2289 ptr = build_int_cst (TREE_TYPE (gimple_call_arg (stmt, 1)), 0);
2290 align = tree_to_shwi (gimple_call_arg (stmt, 1));
2291 if (TYPE_ALIGN (type) != align)
2292 type = build_aligned_type (type, align);
2293 rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0), ptr);
2295 mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2296 gcc_assert (MEM_P (mem));
2297 mask = expand_normal (maskt);
2298 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2299 create_output_operand (&ops[0], target, TYPE_MODE (type));
2300 create_fixed_operand (&ops[1], mem);
2301 create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
2302 expand_insn (convert_optab_handler (optab, TYPE_MODE (type),
2303 TYPE_MODE (TREE_TYPE (maskt))),
2304 3, ops);
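/* An editorial sketch of the GIMPLE this expander handles, with
   hypothetical SSA names: the vectorizer emits a call such as
     vect__1.5_7 = MASK_LOAD (vectp_3, 32B, mask_6);
   where the second argument carries the alignment and the third the
   vector mask; the modes of the lhs and of the mask select the insn
   through the convert optab.  */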
2307 /* Expand MASK_STORE call STMT using optab OPTAB. */
2309 static void
2310 expand_mask_store_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
2312 struct expand_operand ops[3];
2313 tree type, lhs, rhs, maskt, ptr;
2314 rtx mem, reg, mask;
2315 unsigned align;
2317 maskt = gimple_call_arg (stmt, 2);
2318 rhs = gimple_call_arg (stmt, 3);
2319 type = TREE_TYPE (rhs);
2320 ptr = build_int_cst (TREE_TYPE (gimple_call_arg (stmt, 1)), 0);
2321 align = tree_to_shwi (gimple_call_arg (stmt, 1));
2322 if (TYPE_ALIGN (type) != align)
2323 type = build_aligned_type (type, align);
2324 lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0), ptr);
2326 mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2327 gcc_assert (MEM_P (mem));
2328 mask = expand_normal (maskt);
2329 reg = expand_normal (rhs);
2330 create_fixed_operand (&ops[0], mem);
2331 create_input_operand (&ops[1], reg, TYPE_MODE (type));
2332 create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
2333 expand_insn (convert_optab_handler (optab, TYPE_MODE (type),
2334 TYPE_MODE (TREE_TYPE (maskt))),
2335 3, ops);
2338 static void
2339 expand_ABNORMAL_DISPATCHER (internal_fn, gcall *)
2343 static void
2344 expand_BUILTIN_EXPECT (internal_fn, gcall *stmt)
2346 /* When guessing was done, the hints should already have been stripped away. */
2347 gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());
2349 rtx target;
2350 tree lhs = gimple_call_lhs (stmt);
2351 if (lhs)
2352 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2353 else
2354 target = const0_rtx;
2355 rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
2356 if (lhs && val != target)
2357 emit_move_insn (target, val);
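/* Editorial note: by this point a call such as
     c_2 = __builtin_expect (c_1, 1);
   (hypothetical names) has already done its branch-prediction work
   earlier in the pipeline, so only a plain copy of the first argument
   into the lhs is emitted here.  */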
2360 /* IFN_VA_ARG is supposed to be expanded in pass_stdarg, so this dummy
2361 function should never be called. */
2363 static void
2364 expand_VA_ARG (internal_fn, gcall *)
2366 gcc_unreachable ();
2369 /* Expand the IFN_UNIQUE function according to its first argument. */
2371 static void
2372 expand_UNIQUE (internal_fn, gcall *stmt)
2374 rtx pattern = NULL_RTX;
2375 enum ifn_unique_kind kind
2376 = (enum ifn_unique_kind) TREE_INT_CST_LOW (gimple_call_arg (stmt, 0));
2378 switch (kind)
2380 default:
2381 gcc_unreachable ();
2383 case IFN_UNIQUE_UNSPEC:
2384 if (targetm.have_unique ())
2385 pattern = targetm.gen_unique ();
2386 break;
2388 case IFN_UNIQUE_OACC_FORK:
2389 case IFN_UNIQUE_OACC_JOIN:
2390 if (targetm.have_oacc_fork () && targetm.have_oacc_join ())
2392 tree lhs = gimple_call_lhs (stmt);
2393 rtx target = const0_rtx;
2395 if (lhs)
2396 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2398 rtx data_dep = expand_normal (gimple_call_arg (stmt, 1));
2399 rtx axis = expand_normal (gimple_call_arg (stmt, 2));
2401 if (kind == IFN_UNIQUE_OACC_FORK)
2402 pattern = targetm.gen_oacc_fork (target, data_dep, axis);
2403 else
2404 pattern = targetm.gen_oacc_join (target, data_dep, axis);
2406 else
2407 gcc_unreachable ();
2408 break;
2411 if (pattern)
2412 emit_insn (pattern);
2415 /* The size of an OpenACC compute dimension. */
2417 static void
2418 expand_GOACC_DIM_SIZE (internal_fn, gcall *stmt)
2420 tree lhs = gimple_call_lhs (stmt);
2422 if (!lhs)
2423 return;
2425 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2426 if (targetm.have_oacc_dim_size ())
2428 rtx dim = expand_expr (gimple_call_arg (stmt, 0), NULL_RTX,
2429 VOIDmode, EXPAND_NORMAL);
2430 emit_insn (targetm.gen_oacc_dim_size (target, dim));
2432 else
2433 emit_move_insn (target, GEN_INT (1));
2436 /* The position of an OpenACC execution engine along one compute axis. */
2438 static void
2439 expand_GOACC_DIM_POS (internal_fn, gcall *stmt)
2441 tree lhs = gimple_call_lhs (stmt);
2443 if (!lhs)
2444 return;
2446 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2447 if (targetm.have_oacc_dim_pos ())
2449 rtx dim = expand_expr (gimple_call_arg (stmt, 0), NULL_RTX,
2450 VOIDmode, EXPAND_NORMAL);
2451 emit_insn (targetm.gen_oacc_dim_pos (target, dim));
2453 else
2454 emit_move_insn (target, const0_rtx);
2457 /* This is expanded by the oacc_device_lower pass. */
2459 static void
2460 expand_GOACC_LOOP (internal_fn, gcall *)
2462 gcc_unreachable ();
2465 /* This is expanded by the oacc_device_lower pass. */
2467 static void
2468 expand_GOACC_REDUCTION (internal_fn, gcall *)
2470 gcc_unreachable ();
2473 /* This is expanded by the oacc_device_lower pass. */
2475 static void
2476 expand_GOACC_TILE (internal_fn, gcall *)
2478 gcc_unreachable ();
2481 /* Set errno to EDOM. */
2483 static void
2484 expand_SET_EDOM (internal_fn, gcall *)
2486 #ifdef TARGET_EDOM
2487 #ifdef GEN_ERRNO_RTX
2488 rtx errno_rtx = GEN_ERRNO_RTX;
2489 #else
2490 rtx errno_rtx = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2491 #endif
2492 emit_move_insn (errno_rtx,
2493 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2494 #else
2495 gcc_unreachable ();
2496 #endif
2499 /* Expand atomic bit test and set. */
2501 static void
2502 expand_ATOMIC_BIT_TEST_AND_SET (internal_fn, gcall *call)
2504 expand_ifn_atomic_bit_test_and (call);
2507 /* Expand atomic bit test and complement. */
2509 static void
2510 expand_ATOMIC_BIT_TEST_AND_COMPLEMENT (internal_fn, gcall *call)
2512 expand_ifn_atomic_bit_test_and (call);
2515 /* Expand atomic bit test and reset. */
2517 static void
2518 expand_ATOMIC_BIT_TEST_AND_RESET (internal_fn, gcall *call)
2520 expand_ifn_atomic_bit_test_and (call);
2523 /* Expand atomic compare and exchange. */
2525 static void
2526 expand_ATOMIC_COMPARE_EXCHANGE (internal_fn, gcall *call)
2528 expand_ifn_atomic_compare_exchange (call);
2531 /* Expand LAUNDER to assignment, lhs = arg0. */
2533 static void
2534 expand_LAUNDER (internal_fn, gcall *call)
2536 tree lhs = gimple_call_lhs (call);
2538 if (!lhs)
2539 return;
2541 expand_assignment (lhs, gimple_call_arg (call, 0), false);
2544 /* Expand DIVMOD () using:
2545 a) the optab handler for udivmod/sdivmod if it is available;
2546 b) otherwise, a call to the target-specific divmod libfunc. */
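/* An editorial illustration with hypothetical SSA names: the divmod
   pass in tree-ssa-math-opts.c merges
     q_3 = a_1 / b_2;  r_4 = a_1 % b_2;
   into
     _5 = DIVMOD (a_1, b_2);
     q_3 = REALPART_EXPR <_5>;  r_4 = IMAGPART_EXPR <_5>;
   and the expander below fills both parts at once.  */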
2549 static void
2550 expand_DIVMOD (internal_fn, gcall *call_stmt)
2552 tree lhs = gimple_call_lhs (call_stmt);
2553 tree arg0 = gimple_call_arg (call_stmt, 0);
2554 tree arg1 = gimple_call_arg (call_stmt, 1);
2556 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
2557 tree type = TREE_TYPE (TREE_TYPE (lhs));
2558 machine_mode mode = TYPE_MODE (type);
2559 bool unsignedp = TYPE_UNSIGNED (type);
2560 optab tab = (unsignedp) ? udivmod_optab : sdivmod_optab;
2562 rtx op0 = expand_normal (arg0);
2563 rtx op1 = expand_normal (arg1);
2564 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2566 rtx quotient, remainder, libfunc;
2568 /* Check whether an optab handler exists for the divmod optab in the given mode. */
2569 if (optab_handler (tab, mode) != CODE_FOR_nothing)
2571 quotient = gen_reg_rtx (mode);
2572 remainder = gen_reg_rtx (mode);
2573 expand_twoval_binop (tab, op0, op1, quotient, remainder, unsignedp);
2576 /* Generate call to divmod libfunc if it exists. */
2577 else if ((libfunc = optab_libfunc (tab, mode)) != NULL_RTX)
2578 targetm.expand_divmod_libfunc (libfunc, mode, op0, op1,
2579 &quotient, &remainder);
2581 else
2582 gcc_unreachable ();
2584 /* Wrap the return value (quotient, remainder) in a COMPLEX_EXPR. */
2585 expand_expr (build2 (COMPLEX_EXPR, TREE_TYPE (lhs),
2586 make_tree (TREE_TYPE (arg0), quotient),
2587 make_tree (TREE_TYPE (arg1), remainder)),
2588 target, VOIDmode, EXPAND_NORMAL);
2591 /* Expand a call to FN using the operands in STMT. FN has a single
2592 output operand and NARGS input operands. */
2594 static void
2595 expand_direct_optab_fn (internal_fn fn, gcall *stmt, direct_optab optab,
2596 unsigned int nargs)
2598 expand_operand *ops = XALLOCAVEC (expand_operand, nargs + 1);
2600 tree_pair types = direct_internal_fn_types (fn, stmt);
2601 insn_code icode = direct_optab_handler (optab, TYPE_MODE (types.first));
2603 tree lhs = gimple_call_lhs (stmt);
2604 tree lhs_type = TREE_TYPE (lhs);
2605 rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2606 create_output_operand (&ops[0], lhs_rtx, insn_data[icode].operand[0].mode);
2608 for (unsigned int i = 0; i < nargs; ++i)
2610 tree rhs = gimple_call_arg (stmt, i);
2611 tree rhs_type = TREE_TYPE (rhs);
2612 rtx rhs_rtx = expand_normal (rhs);
2613 if (INTEGRAL_TYPE_P (rhs_type))
2614 create_convert_operand_from (&ops[i + 1], rhs_rtx,
2615 TYPE_MODE (rhs_type),
2616 TYPE_UNSIGNED (rhs_type));
2617 else
2618 create_input_operand (&ops[i + 1], rhs_rtx, TYPE_MODE (rhs_type));
2621 expand_insn (icode, nargs + 1, ops);
2622 if (!rtx_equal_p (lhs_rtx, ops[0].value))
2624 /* If the return value has an integral type, convert the instruction
2625 result to that type. This is useful for things that return an
2626 int regardless of the size of the input. If the instruction result
2627 is smaller than required, assume that it is signed.
2629 If the return value has a nonintegral type, its mode must match
2630 the instruction result. */
2631 if (GET_CODE (lhs_rtx) == SUBREG && SUBREG_PROMOTED_VAR_P (lhs_rtx))
2633 /* If this is a scalar in a register that is stored in a wider
2634 mode than the declared mode, compute the result into its
2635 declared mode and then convert to the wider mode. */
2636 gcc_checking_assert (INTEGRAL_TYPE_P (lhs_type));
2637 rtx tmp = convert_to_mode (GET_MODE (lhs_rtx), ops[0].value, 0);
2638 convert_move (SUBREG_REG (lhs_rtx), tmp,
2639 SUBREG_PROMOTED_SIGN (lhs_rtx));
2641 else if (GET_MODE (lhs_rtx) == GET_MODE (ops[0].value))
2642 emit_move_insn (lhs_rtx, ops[0].value);
2643 else
2645 gcc_checking_assert (INTEGRAL_TYPE_P (lhs_type));
2646 convert_move (lhs_rtx, ops[0].value, 0);
2651 /* Expanders for optabs that can use expand_direct_optab_fn. */
2653 #define expand_unary_optab_fn(FN, STMT, OPTAB) \
2654 expand_direct_optab_fn (FN, STMT, OPTAB, 1)
2656 #define expand_binary_optab_fn(FN, STMT, OPTAB) \
2657 expand_direct_optab_fn (FN, STMT, OPTAB, 2)
2659 /* RETURN_TYPE and ARGS are a return type and argument list that are
2660 in principle compatible with FN (which satisfies direct_internal_fn_p).
2661 Return the types that should be used to determine whether the
2662 target supports FN. */
2664 tree_pair
2665 direct_internal_fn_types (internal_fn fn, tree return_type, tree *args)
2667 const direct_internal_fn_info &info = direct_internal_fn (fn);
2668 tree type0 = (info.type0 < 0 ? return_type : TREE_TYPE (args[info.type0]));
2669 tree type1 = (info.type1 < 0 ? return_type : TREE_TYPE (args[info.type1]));
2670 return tree_pair (type0, type1);
2673 /* CALL is a call whose return type and arguments are in principle
2674 compatible with FN (which satisfies direct_internal_fn_p). Return the
2675 types that should be used to determine whether the target supports FN. */
2677 tree_pair
2678 direct_internal_fn_types (internal_fn fn, gcall *call)
2680 const direct_internal_fn_info &info = direct_internal_fn (fn);
2681 tree op0 = (info.type0 < 0
2682 ? gimple_call_lhs (call)
2683 : gimple_call_arg (call, info.type0));
2684 tree op1 = (info.type1 < 0
2685 ? gimple_call_lhs (call)
2686 : gimple_call_arg (call, info.type1));
2687 return tree_pair (TREE_TYPE (op0), TREE_TYPE (op1));
2690 /* Return true if OPTAB is supported for TYPES (whose modes should be
2691 the same) when the optimization type is OPT_TYPE. Used for simple
2692 direct optabs. */
2694 static bool
2695 direct_optab_supported_p (direct_optab optab, tree_pair types,
2696 optimization_type opt_type)
2698 machine_mode mode = TYPE_MODE (types.first);
2699 gcc_checking_assert (mode == TYPE_MODE (types.second));
2700 return direct_optab_handler (optab, mode, opt_type) != CODE_FOR_nothing;
2703 /* Return true if load/store lanes optab OPTAB is supported for
2704 array type TYPES.first when the optimization type is OPT_TYPE. */
2706 static bool
2707 multi_vector_optab_supported_p (convert_optab optab, tree_pair types,
2708 optimization_type opt_type)
2710 gcc_assert (TREE_CODE (types.first) == ARRAY_TYPE);
2711 machine_mode imode = TYPE_MODE (types.first);
2712 machine_mode vmode = TYPE_MODE (TREE_TYPE (types.first));
2713 return (convert_optab_handler (optab, imode, vmode, opt_type)
2714 != CODE_FOR_nothing);
2717 #define direct_unary_optab_supported_p direct_optab_supported_p
2718 #define direct_binary_optab_supported_p direct_optab_supported_p
2719 #define direct_mask_load_optab_supported_p direct_optab_supported_p
2720 #define direct_load_lanes_optab_supported_p multi_vector_optab_supported_p
2721 #define direct_mask_store_optab_supported_p direct_optab_supported_p
2722 #define direct_store_lanes_optab_supported_p multi_vector_optab_supported_p
2724 /* Return true if FN is supported for the types in TYPES when the
2725 optimization type is OPT_TYPE. The types are those associated with
2726 the "type0" and "type1" fields of FN's direct_internal_fn_info
2727 structure. */
2729 bool
2730 direct_internal_fn_supported_p (internal_fn fn, tree_pair types,
2731 optimization_type opt_type)
2733 switch (fn)
2735 #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
2736 case IFN_##CODE: break;
2737 #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
2738 case IFN_##CODE: \
2739 return direct_##TYPE##_optab_supported_p (OPTAB##_optab, types, \
2740 opt_type);
2741 #include "internal-fn.def"
2743 case IFN_LAST:
2744 break;
2746 gcc_unreachable ();
2749 /* Return true if FN is supported for type TYPE when the optimization
2750 type is OPT_TYPE. The caller knows that the "type0" and "type1"
2751 fields of FN's direct_internal_fn_info structure are the same. */
2753 bool
2754 direct_internal_fn_supported_p (internal_fn fn, tree type,
2755 optimization_type opt_type)
2757 const direct_internal_fn_info &info = direct_internal_fn (fn);
2758 gcc_checking_assert (info.type0 == info.type1);
2759 return direct_internal_fn_supported_p (fn, tree_pair (type, type), opt_type);
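/* A hypothetical caller (editorial sketch): before emitting IFN_SQRT,
   code can ask
     direct_internal_fn_supported_p (IFN_SQRT, double_type_node,
				     OPTIMIZE_FOR_SPEED)
   which reduces to checking whether the sqrt optab has a handler for
   DFmode.  */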
2762 /* Return true if IFN_SET_EDOM is supported. */
2764 bool
2765 set_edom_supported_p (void)
2767 #ifdef TARGET_EDOM
2768 return true;
2769 #else
2770 return false;
2771 #endif
2774 #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
2775 static void \
2776 expand_##CODE (internal_fn fn, gcall *stmt) \
2778 expand_##TYPE##_optab_fn (fn, stmt, OPTAB##_optab); \
2780 #include "internal-fn.def"
2782 /* Routines to expand each internal function, indexed by function number.
2783 Each routine has the prototype:
2785 expand_<NAME> (internal_fn fn, gcall *stmt)
2787 where FN is the function being expanded and STMT is the statement that performs the call. */
2788 static void (*const internal_fn_expanders[]) (internal_fn, gcall *) = {
2789 #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
2790 #include "internal-fn.def"
2794 /* Expand STMT as though it were a call to internal function FN. */
2796 void
2797 expand_internal_call (internal_fn fn, gcall *stmt)
2799 internal_fn_expanders[fn] (fn, stmt);
2802 /* Expand STMT, which is a call to internal function FN. */
2804 void
2805 expand_internal_call (gcall *stmt)
2807 expand_internal_call (gimple_call_internal_fn (stmt), stmt);
2810 void
2811 expand_PHI (internal_fn, gcall *)
2813 gcc_unreachable ();