/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
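
/* ANNOTATE internal calls carry loop annotations (e.g. for #pragma GCC
   ivdep) and are consumed earlier in the middle end, so none should
   survive to expansion.  */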
static void
expand_ANNOTATE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Helper function for expand_addsub_overflow.  Return 1 if ARG,
   interpreted as signed in its precision, is known to be always
   positive, 2 if it is known to be always negative, or 3 if it
   may be positive or negative.  */
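/* For example, a 16-bit SSA_NAME with value range [0, 1000] yields 1,
   one with range [-5, -1] yields 2, and one with range [-1, 1] (or no
   recorded range at all) yields 3.  */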

static int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      wide_int w = wi::sext (arg, prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}

/* Return minimum precision needed to represent all values
   of ARG in SIGNed integral type.  */
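/* For example, an SSA_NAME with value range [0, 100] needs 7 bits when
   SIGN is UNSIGNED, but 8 bits when SIGN is SIGNED (one extra bit for
   the sign).  */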

static int
get_min_precision (tree arg, signop sign)
{
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  signop orig_sign = sign;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      int p;
      if (TYPE_SIGN (TREE_TYPE (arg)) != sign)
	{
	  widest_int w = wi::to_widest (arg);
	  w = wi::ext (w, prec, sign);
	  p = wi::min_precision (w, sign);
	}
      else
	p = wi::min_precision (arg, sign);
      return MIN (p, prec);
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
	    sign = UNSIGNED;
	  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
	    return prec + (orig_sign != sign);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	}
      if (++cnt > 30)
	return prec + (orig_sign != sign);
    }
  if (TREE_CODE (arg) != SSA_NAME)
    return prec + (orig_sign != sign);
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      arg = t;
	      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
		    sign = UNSIGNED;
		  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
		    return prec + (orig_sign != sign);
		  prec = TYPE_PRECISION (TREE_TYPE (arg));
		}
	      if (++cnt > 30)
		return prec + (orig_sign != sign);
	      continue;
	    }
	}
      return prec + (orig_sign != sign);
    }
  if (sign == TYPE_SIGN (TREE_TYPE (arg)))
    {
      int p1 = wi::min_precision (arg_min, sign);
      int p2 = wi::min_precision (arg_max, sign);
      p1 = MAX (p1, p2);
      prec = MIN (prec, p1);
    }
  else if (sign == UNSIGNED && !wi::neg_p (arg_min, SIGNED))
    {
      int p = wi::min_precision (arg_max, SIGNED);
      prec = MIN (prec, p);
    }
  return prec + (orig_sign != sign);
}

/* Helper for expand_*_overflow.  Store RES into the __real__ part
   of TARGET.  If RES has larger MODE than __real__ part of TARGET,
   set the __imag__ part to 1 if RES doesn't fit into it.  */

static void
expand_arith_overflow_result_store (tree lhs, rtx target,
				    machine_mode mode, rtx res)
{
  machine_mode tgtmode = GET_MODE_INNER (GET_MODE (target));
  rtx lres = res;
  if (tgtmode != mode)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      lres = convert_modes (tgtmode, mode, res, uns);
      gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
      do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
			       EQ, true, mode, NULL_RTX, NULL_RTX, done_label,
			       PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      emit_label (done_label);
    }
  write_complex_part (target, lres, false);
}

/* Helper for expand_*_overflow.  Store RES into TARGET.  */

static void
expand_ubsan_result_store (rtx target, rtx res)
{
  if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    convert_move (SUBREG_REG (target), res, SUBREG_PROMOTED_SIGN (target));
  else
    emit_move_insn (target, res);
}

/* Add overflow checking to the addition or subtraction in statement STMT.
   CODE says whether the operation is + or -.  */

static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
			tree arg0, tree arg1, bool unsr_p, bool uns0_p,
			bool uns1_p, bool is_ubsan)
{
  rtx res, target = NULL_RTX;
  tree fn;
  rtx_code_label *done_label = gen_label_rtx ();
  rtx_code_label *do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  int prec = GET_MODE_PRECISION (mode);
  rtx sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
  bool do_xor = false;

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)); S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is the signed first operand, u1 the unsigned first operand,
     s2 the signed second operand, u2 the unsigned second operand,
     sr the signed result, ur the unsigned result; the following
     rules say how to compute the result (which is always the result
     of the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether the operation overflowed.

     s1 + s2 -> sr
	res = (S) ((U) s1 + (U) s2)
	ovf = s2 < 0 ? res > s1 : res < s1 (or jump on overflow)
     s1 - s2 -> sr
	res = (S) ((U) s1 - (U) s2)
	ovf = s2 < 0 ? res < s1 : res > s1 (or jump on overflow)
     u1 + u2 -> ur
	res = u1 + u2
	ovf = res < u1 (or jump on carry, but RTL opts will handle it)
     u1 - u2 -> ur
	res = u1 - u2
	ovf = res > u1 (or jump on carry, but RTL opts will handle it)
     s1 + u2 -> sr
	res = (S) ((U) s1 + u2)
	ovf = ((U) res ^ sgn) < u2
     s1 + u2 -> ur
	t1 = (S) (u2 ^ sgn)
	t2 = s1 + t1
	res = (U) t2 ^ sgn
	ovf = t1 < 0 ? t2 > s1 : t2 < s1 (or jump on overflow)
     s1 - u2 -> sr
	res = (S) ((U) s1 - u2)
	ovf = u2 > ((U) s1 ^ sgn)
     s1 - u2 -> ur
	res = (U) s1 - u2
	ovf = s1 < 0 || u2 > (U) s1
     u1 - s2 -> sr
	res = u1 - (U) s2
	ovf = u1 >= ((U) s2 ^ sgn)
     u1 - s2 -> ur
	t1 = u1 ^ sgn
	t2 = t1 - (U) s2
	res = t2 ^ sgn
	ovf = s2 < 0 ? (S) t2 < (S) t1 : (S) t2 > (S) t1 (or jump on overflow)
     s1 + s2 -> ur
	res = (U) s1 + (U) s2
	ovf = s2 < 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)
     u1 + u2 -> sr
	res = (S) (u1 + u2)
	ovf = (U) res < u2 || res < 0
     u1 - u2 -> sr
	res = (S) (u1 - u2)
	ovf = u1 >= u2 ? res < 0 : res >= 0
     s1 - s2 -> ur
	res = (U) s1 - (U) s2
	ovf = s2 >= 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)  */
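
  /* As a concrete illustration of the s1 + u2 -> sr rule with 8-bit
     precision (sgn = 0x80): for s1 = 100, u2 = 50 the true sum 150 does
     not fit in S, and indeed ((U) res ^ sgn) = (150 ^ 128) = 22 < 50
     flags the overflow; for s1 = 100, u2 = 27 the sum 127 fits, and
     (127 ^ 128) = 255 >= 27 correctly reports no overflow.  */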

  if (code == PLUS_EXPR && uns0_p && !uns1_p)
    {
      /* PLUS_EXPR is commutative; if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      rtx tem = op1;
      op1 = op0;
      op0 = tem;
      tree t = arg1;
      arg1 = arg0;
      arg0 = t;
      uns0_p = 0;
      uns1_p = 1;
    }

  /* u1 +- u2 -> ur  */
  if (uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the operation is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = op0;
      /* For PLUS_EXPR, the operation is commutative, so we can pick the
	 operand to compare against.  For prec <= BITS_PER_WORD,
	 preferring a REG operand over a CONST_INT is likely better,
	 because the CONST_INT might enlarge the instruction or CSE would
	 need to figure out we'd already loaded it into a register
	 before.  For prec > BITS_PER_WORD, CONST_INT might be more
	 beneficial, as then the multi-word comparison can perhaps be
	 simplified.  */
      if (code == PLUS_EXPR
	  && (prec <= BITS_PER_WORD
	      ? (CONST_SCALAR_INT_P (op0) && REG_P (op1))
	      : CONST_SCALAR_INT_P (op1)))
	tem = op1;
      do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
			       true, mode, NULL_RTX, NULL_RTX, done_label,
			       PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 +- u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the operation is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab,
			      code == PLUS_EXPR ? res : op0, sgn,
			      NULL_RTX, false, OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 + u2 -> ur  */
  if (code == PLUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      op1 = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op1, we have to avoid using the value range
	 for the original argument.  */
      arg1 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* u1 - s2 -> ur  */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && unsr_p)
    {
      op0 = expand_binop (mode, add_optab, op0, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op0, we have to avoid using the value range
	 for the original argument.  */
      arg0 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* s1 - u2 -> ur  */
  if (code == MINUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the subtraction is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg0);
      if (pos_neg == 2)
	/* If ARG0 is known to be always negative, this is always overflow.  */
	emit_jump (do_error);
      else if (pos_neg == 3)
	/* If ARG0 is not known to be always positive, check at runtime.  */
	do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL_RTX, do_error, PROB_VERY_UNLIKELY);
      do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* u1 - s2 -> sr  */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the subtraction is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* u1 + u2 -> sr  */
  if (code == PLUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL_RTX, do_error, PROB_VERY_UNLIKELY);
      rtx tem = op1;
      /* The operation is commutative, so we can pick the operand to
	 compare against.  For prec <= BITS_PER_WORD, preferring a REG
	 operand over a CONST_INT is likely better, because the CONST_INT
	 might enlarge the instruction or CSE would need to figure out
	 we'd already loaded it into a register before.  For
	 prec > BITS_PER_WORD, CONST_INT might be more beneficial, as
	 then the multi-word comparison can perhaps be simplified.  */
      if (prec <= BITS_PER_WORD
	  ? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
	  : CONST_SCALAR_INT_P (op0))
	tem = op0;
      do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 +- s2 -> ur  */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the operation is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg1);
      if (code == PLUS_EXPR)
	{
	  int pos_neg0 = get_range_pos_neg (arg0);
	  if (pos_neg0 != 3 && pos_neg == 3)
	    {
	      rtx tem = op1;
	      op1 = op0;
	      op0 = tem;
	      pos_neg = pos_neg0;
	    }
	}
      rtx tem;
      if (pos_neg != 3)
	{
	  tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
				    ? and_optab : ior_optab,
			      op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	}
      else
	{
	  rtx_code_label *do_ior_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op1, const0_rtx,
				   code == MINUS_EXPR ? GE : LT, false, mode,
				   NULL_RTX, NULL_RTX, do_ior_label,
				   PROB_EVEN);
	  tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  emit_jump (do_error);
	  emit_label (do_ior_label);
	  tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	}
      goto do_error_label;
    }

  /* u1 - u2 -> sr  */
  if (code == MINUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the subtraction is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx_code_label *op0_geu_op1 = gen_label_rtx ();
      do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL_RTX,
			       op0_geu_op1, PROB_EVEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL_RTX, done_label, PROB_VERY_LIKELY);
      emit_jump (do_error);
      emit_label (op0_geu_op1);
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL_RTX, done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  gcc_assert (!uns0_p && !uns1_p && !unsr_p);

  /* s1 +- s2 -> sr  */
 do_signed: ;
  enum insn_code icode;
  icode = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the operation is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump at runtime instead of the two (three in the
	 emitted code) otherwise needed.  If one of the arguments is
	 CONST_INT, all we need is to make sure it is op1, then the first
	 do_compare_rtx_and_jump will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  pos_neg = get_range_pos_neg (arg0);
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	pos_neg = get_range_pos_neg (arg1);

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	do_compare_rtx_and_jump (op1, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL_RTX, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? GE : LE,
				 false, mode, NULL_RTX, NULL_RTX, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? LE : GE,
				 false, mode, NULL_RTX, NULL_RTX, done_label,
				 PROB_VERY_LIKELY);
    }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
					 arg0, arg1);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	{
	  if (do_xor)
	    res = expand_binop (mode, add_optab, res, sgn, NULL_RTX, false,
				OPTAB_LIB_WIDEN);

	  expand_arith_overflow_result_store (lhs, target, mode, res);
	}
    }
}

/* Add negate overflow checking to the statement STMT.  */

static void
expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
{
  rtx res, op1;
  tree fn;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL_RTX,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
					 arg1, NULL_TREE);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

/* Add mul overflow checking to the statement STMT.  */

static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
		     bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan)
{
  rtx res, op0, op1;
  tree fn, type;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;
  signop sign;
  enum insn_code icode;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  bool uns = unsr_p;
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)); S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is the signed first operand, u1 the unsigned first operand,
     s2 the signed second operand, u2 the unsigned second operand,
     sr the signed result, ur the unsigned result; the following
     rules say how to compute the result (which is always the result
     of the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether the operation overflowed.
     main_ovf (false) stands for jump on signed multiplication
     overflow or the main algorithm with uns == false.
     main_ovf (true) stands for jump on unsigned multiplication
     overflow or the main algorithm with uns == true.

     s1 * s2 -> sr
	res = (S) ((U) s1 * (U) s2)
	ovf = main_ovf (false)
     u1 * u2 -> ur
	res = u1 * u2
	ovf = main_ovf (true)
     s1 * u2 -> ur
	res = (U) s1 * u2
	ovf = (s1 < 0 && u2) || main_ovf (true)
     u1 * u2 -> sr
	res = (S) (u1 * u2)
	ovf = res < 0 || main_ovf (true)
     s1 * u2 -> sr
	res = (S) ((U) s1 * u2)
	ovf = (S) u2 >= 0 ? main_ovf (false)
			  : (s1 != 0 && (s1 != -1 || u2 != (U) res))
     s1 * s2 -> ur
	t1 = (s1 & s2) < 0 ? (-(U) s1) : ((U) s1)
	t2 = (s1 & s2) < 0 ? (-(U) s2) : ((U) s2)
	res = t1 * t2
	ovf = (s1 ^ s2) < 0 ? (s1 && s2) : main_ovf (true)  */
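
  /* For instance, with 8-bit precision the u1 * u2 -> sr rule catches
     u1 = 16, u2 = 10: the product 160 fits in U (no unsigned overflow,
     so main_ovf (true) is false), yet (S) 160 = -96 < 0, so the cast to
     the signed result type overflows and ovf is set.  */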

  if (uns0_p && !uns1_p)
    {
      /* Multiplication is commutative; if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      rtx tem = op1;
      op1 = op0;
      op0 = tem;
      tree t = arg1;
      arg1 = arg0;
      arg0 = t;
      uns0_p = 0;
      uns1_p = 1;
    }

  int pos_neg0 = get_range_pos_neg (arg0);
  int pos_neg1 = get_range_pos_neg (arg1);

  /* s1 * u2 -> ur  */
  if (!uns0_p && uns1_p && unsr_p)
    {
      switch (pos_neg0)
	{
	case 1:
	  /* If s1 is non-negative, just perform normal u1 * u2 -> ur.  */
	  goto do_main;
	case 2:
	  /* If s1 is negative, avoid the main code, just multiply and
	     signal overflow if op1 is not 0.  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  goto do_error_label;
	case 3:
	  rtx_code_label *do_main_label;
	  do_main_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  write_complex_part (target, const1_rtx, true);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* u1 * u2 -> sr  */
  if (uns0_p && uns1_p && !unsr_p)
    {
      uns = true;
      /* Rest of handling of this case after res is computed.  */
      goto do_main;
    }

  /* s1 * u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      switch (pos_neg1)
	{
	case 1:
	  goto do_main;
	case 2:
	  /* If (S) u2 is negative (i.e. u2 is larger than maximum of S),
	     avoid the main code, just multiply and signal overflow
	     unless 0 * u2 or -1 * ((U) Smin).  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
				   NULL_RTX, do_error, PROB_VERY_UNLIKELY);
	  int prec;
	  prec = GET_MODE_PRECISION (mode);
	  rtx sgn;
	  sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
	  do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
				   NULL_RTX, done_label, PROB_VERY_LIKELY);
	  goto do_error_label;
	case 3:
	  /* Rest of handling of this case after res is computed.  */
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* s1 * s2 -> ur  */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      rtx tem, tem2;
      switch (pos_neg0 | pos_neg1)
	{
	case 1: /* Both operands known to be non-negative.  */
	  goto do_main;
	case 2: /* Both operands known to be negative.  */
	  op0 = expand_unop (mode, neg_optab, op0, NULL_RTX, false);
	  op1 = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
	  /* Avoid looking at arg0/arg1 ranges, as we've changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  goto do_main;
	case 3:
	  if ((pos_neg0 ^ pos_neg1) == 3)
	    {
	      /* If one operand is known to be negative and the other
		 non-negative, this overflows always, unless the non-negative
		 one is 0.  Just do normal multiply and set overflow
		 unless one of the operands is 0.  */
	      struct separate_ops ops;
	      ops.code = MULT_EXPR;
	      ops.type
		= build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
						  1);
	      ops.op0 = make_tree (ops.type, op0);
	      ops.op1 = make_tree (ops.type, op1);
	      ops.op2 = NULL_TREE;
	      ops.location = loc;
	      res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	      tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
				  OPTAB_LIB_WIDEN);
	      do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode,
				       NULL_RTX, NULL_RTX, done_label,
				       PROB_VERY_LIKELY);
	      goto do_error_label;
	    }
	  /* The general case, do all the needed comparisons at runtime.  */
	  rtx_code_label *do_main_label, *after_negate_label;
	  rtx rop0, rop1;
	  rop0 = gen_reg_rtx (mode);
	  rop1 = gen_reg_rtx (mode);
	  emit_move_insn (rop0, op0);
	  emit_move_insn (rop1, op1);
	  op0 = rop0;
	  op1 = rop1;
	  do_main_label = gen_label_rtx ();
	  after_negate_label = gen_label_rtx ();
	  tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, after_negate_label,
				   PROB_VERY_LIKELY);
	  /* Both arguments negative here, negate them and continue with
	     normal unsigned overflow checking multiplication.  */
	  emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
					    NULL_RTX, false));
	  emit_move_insn (op1, expand_unop (mode, neg_optab, op1,
					    NULL_RTX, false));
	  /* Avoid looking at arg0/arg1 ranges, as we might have changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  emit_jump (do_main_label);
	  emit_label (after_negate_label);
	  tem2 = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
			       OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem2, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  /* One argument is negative here, the other positive.  This
	     overflows always, unless one of the arguments is 0.  But
	     if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
	     is, thus we can keep the do_main code ORing in overflow as is.  */
	  do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL_RTX, do_main_label, PROB_VERY_LIKELY);
	  write_complex_part (target, const1_rtx, true);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

 do_main:
  type = build_nonstandard_integer_type (GET_MODE_PRECISION (mode), uns);
  sign = uns ? UNSIGNED : SIGNED;
  icode = optab_handler (uns ? umulv4_optab : mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      int prec = GET_MODE_PRECISION (mode);
      machine_mode hmode = mode_for_size (prec / 2, MODE_INT, 1);
      ops.op0 = make_tree (type, op0);
      ops.op1 = make_tree (type, op1);
      ops.op2 = NULL_TREE;
      ops.location = loc;
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), uns);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res, prec,
				     NULL_RTX, uns);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  if (uns)
	    /* For the unsigned multiplication, there was overflow if
	       HIPART is non-zero.  */
	    do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
				     NULL_RTX, NULL_RTX, done_label,
				     PROB_VERY_LIKELY);
	  else
	    {
	      rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
					  NULL_RTX, 0);
	      /* RES is low half of the double width result, HIPART
		 the high half.  There was overflow if
		 HIPART is different from RES < 0 ? -1 : 0.  */
	      do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
				       NULL_RTX, NULL_RTX, done_label,
				       PROB_VERY_LIKELY);
	    }
	}
      else if (hmode != BLKmode && 2 * GET_MODE_PRECISION (hmode) == prec)
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = uns ? NULL : gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, uns);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = const0_rtx;
	  if (!uns)
	    signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				     NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, uns);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = const0_rtx;
	  if (!uns)
	    signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				     NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (pos_neg0 == 1)
	    op0_sign = 0;
	  else if (pos_neg0 == 2)
	    op0_sign = -1;
	  if (pos_neg1 == 1)
	    op1_sign = 0;
	  else if (pos_neg1 == 2)
	    op1_sign = -1;

	  unsigned int mprec0 = prec;
	  if (arg0 != error_mark_node)
	    mprec0 = get_min_precision (arg0, sign);
	  if (mprec0 <= hprec)
	    op0_small_p = true;
	  else if (!uns && mprec0 <= hprec + 1)
	    op0_medium_p = true;
	  unsigned int mprec1 = prec;
	  if (arg1 != error_mark_node)
	    mprec1 = get_min_precision (arg1, sign);
	  if (mprec1 <= hprec)
	    op1_small_p = true;
	  else if (!uns && mprec1 <= hprec + 1)
	    op1_medium_p = true;

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
				     NULL_RTX, NULL_RTX, large_op0,
				     PROB_UNLIKELY);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL_RTX, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign (!uns) or zero (uns) extended from
	     hmode to mode, the multiplication will never overflow.  We can
	     do just one hmode x hmode => mode widening multiplication.  */
	  rtx lopart0s = lopart0, lopart1s = lopart1;
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      lopart0s = shallow_copy_rtx (lopart0);
	      SUBREG_PROMOTED_VAR_P (lopart0s) = 1;
	      SUBREG_PROMOTED_SET (lopart0s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      lopart1s = shallow_copy_rtx (lopart1);
	      SUBREG_PROMOTED_VAR_P (lopart1s) = 1;
	      SUBREG_PROMOTED_SET (lopart1s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, uns);
	  ops.op0 = make_tree (halfstype, lopart0s);
	  ops.op1 = make_tree (halfstype, lopart1s);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = type;
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op1 is not, just swap the arguments and handle it as op1
	     sign/zero extended, op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL_RTX, both_ops_large,
				     PROB_UNLIKELY);

	  /* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op0 is not, prepare larger, hipart and lopart pseudos and
	     handle it together with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  if (!uns)
	    {
	      /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	      if (larger_sign == 0)
		emit_jump (after_hipart_neg);
	      else if (larger_sign != -1)
		do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL_RTX, after_hipart_neg,
					 PROB_EVEN);

	      tem = convert_modes (mode, hmode, lopart, 1);
	      tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	      tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_hipart_neg);

	      /* if (lopart < 0) loxhi -= larger;  */
	      if (smaller_sign == 0)
		emit_jump (after_lopart_neg);
	      else if (smaller_sign != -1)
		do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL_RTX, after_lopart_neg,
					 PROB_EVEN);

	      tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_lopart_neg);
	    }

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1)) (if !uns)
	     if (loxhi >> (bitsize / 2) == 0) (if uns).  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx signbitloxhi = const0_rtx;
	  if (!uns)
	    signbitloxhi = expand_shift (RSHIFT_EXPR, hmode,
					 gen_lowpart (hmode, loxhi),
					 hprec - 1, NULL_RTX, 0);

	  do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
				   NULL_RTX, NULL_RTX, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign (!uns) or zero (uns)
	     extended from hmode), then perform the full multiplication
	     which will be the result of the operation.
	     The only cases which don't overflow are for signed multiplication
	     some cases where both hipart0 and hipart1 are 0 or -1.
	     For unsigned multiplication when high parts are both non-zero
	     this overflows always.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!uns)
	    {
	      if (!op0_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					     NULL_RTX, 1, OPTAB_DIRECT);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL_RTX, do_error,
					   PROB_VERY_UNLIKELY);
		}

	      if (!op1_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					     NULL_RTX, 1, OPTAB_DIRECT);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL_RTX, do_error,
					   PROB_VERY_UNLIKELY);
		}

	      /* At this point hipart{0,1} are both in [-1, 0].  If they are
		 the same, overflow happened if res is negative, if they are
		 different, overflow happened if res is positive.  */
	      if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
		emit_jump (hipart_different);
	      else if (op0_sign == 1 || op1_sign == 1)
		do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
					 NULL_RTX, NULL_RTX, hipart_different,
					 PROB_EVEN);

	      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode,
				       NULL_RTX, NULL_RTX, do_error,
				       PROB_VERY_UNLIKELY);
	      emit_jump (done_label);

	      emit_label (hipart_different);

	      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
				       NULL_RTX, NULL_RTX, do_error,
				       PROB_VERY_UNLIKELY);
	      emit_jump (done_label);
	    }

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  gcc_assert (!is_ubsan);
	  ops.code = MULT_EXPR;
	  ops.type = type;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
					 arg0, arg1);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  /* u1 * u2 -> sr  */
  if (uns0_p && uns1_p && !unsr_p)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL_RTX, all_done_label, PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      emit_label (all_done_label);
    }

  /* s1 * u2 -> sr  */
  if (!uns0_p && uns1_p && !unsr_p && pos_neg1 == 3)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      rtx_code_label *set_noovf = gen_label_rtx ();
      do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL_RTX, all_done_label, PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
			       NULL_RTX, set_noovf, PROB_VERY_LIKELY);
      do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
			       NULL_RTX, all_done_label, PROB_VERY_UNLIKELY);
      do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL_RTX,
			       all_done_label, PROB_VERY_UNLIKELY);
      emit_label (set_noovf);
      write_complex_part (target, const0_rtx, true);
      emit_label (all_done_label);
    }

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
			  false, false, false, true);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  if (integer_zerop (arg0))
    expand_neg_overflow (loc, lhs, arg1, true);
  else
    expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
			    false, false, false, true);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true);
}

/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion.  */

static void
expand_arith_overflow (enum tree_code code, gimple stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  int uns0_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  int uns1_p = TYPE_UNSIGNED (TREE_TYPE (arg1));
  int unsr_p = TYPE_UNSIGNED (type);
  int prec0 = TYPE_PRECISION (TREE_TYPE (arg0));
  int prec1 = TYPE_PRECISION (TREE_TYPE (arg1));
  int precres = TYPE_PRECISION (type);
  location_t loc = gimple_location (stmt);
  if (!uns0_p && get_range_pos_neg (arg0) == 1)
    uns0_p = true;
  if (!uns1_p && get_range_pos_neg (arg1) == 1)
    uns1_p = true;
  int pr = get_min_precision (arg0, uns0_p ? UNSIGNED : SIGNED);
  prec0 = MIN (prec0, pr);
  pr = get_min_precision (arg1, uns1_p ? UNSIGNED : SIGNED);
  prec1 = MIN (prec1, pr);

  /* If uns0_p && uns1_p, precop is minimum needed precision
     of unsigned type to hold the exact result, otherwise
     precop is minimum needed precision of signed type to
     hold the exact result.  */
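  /* For example, multiplying two 16-bit unsigned operands needs
     precop = 16 + 16 = 32 bits, while adding a 16-bit unsigned and a
     16-bit signed operand needs precop = MAX (17, 16) + 1 = 18 bits.  */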
  int precop;
  if (code == MULT_EXPR)
    precop = prec0 + prec1 + (uns0_p != uns1_p);
  else
    {
      if (uns0_p == uns1_p)
	precop = MAX (prec0, prec1) + 1;
      else if (uns0_p)
	precop = MAX (prec0 + 1, prec1) + 1;
      else
	precop = MAX (prec0, prec1 + 1) + 1;
    }
  int orig_precres = precres;

  do
    {
      if ((uns0_p && uns1_p)
	  ? ((precop + !unsr_p) <= precres
	     /* u1 - u2 -> ur can overflow, no matter what precision
		the result has.  */
	     && (code != MINUS_EXPR || !unsr_p))
	  : (!unsr_p && precop <= precres))
	{
	  /* The infinite precision result will always fit into result.  */
	  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  write_complex_part (target, const0_rtx, true);
	  enum machine_mode mode = TYPE_MODE (type);
	  struct separate_ops ops;
	  ops.code = code;
	  ops.type = type;
	  ops.op0 = fold_convert_loc (loc, type, arg0);
	  ops.op1 = fold_convert_loc (loc, type, arg1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  expand_arith_overflow_result_store (lhs, target, mode, tem);
	  return;
	}

#ifdef WORD_REGISTER_OPERATIONS
      /* For sub-word operations, if target doesn't have them, start
	 with precres widening right away, otherwise do it only
	 if the most simple cases can't be used.  */
      if (orig_precres == precres && precres < BITS_PER_WORD)
	;
      else
#endif
      if ((uns0_p && uns1_p && unsr_p && prec0 <= precres && prec1 <= precres)
	  || ((!uns0_p || !uns1_p) && !unsr_p
	      && prec0 + uns0_p <= precres
	      && prec1 + uns1_p <= precres))
	{
	  arg0 = fold_convert_loc (loc, type, arg0);
	  arg1 = fold_convert_loc (loc, type, arg1);
	  switch (code)
	    {
	    case MINUS_EXPR:
	      if (integer_zerop (arg0) && !unsr_p)
		{
		  expand_neg_overflow (loc, lhs, arg1, false);
		  return;
		}
	      /* FALLTHRU */
	    case PLUS_EXPR:
	      expand_addsub_overflow (loc, code, lhs, arg0, arg1,
				      unsr_p, unsr_p, unsr_p, false);
	      return;
	    case MULT_EXPR:
	      expand_mul_overflow (loc, lhs, arg0, arg1,
				   unsr_p, unsr_p, unsr_p, false);
	      return;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* For sub-word operations, retry with a wider type first.  */
      if (orig_precres == precres && precop <= BITS_PER_WORD)
	{
#ifdef WORD_REGISTER_OPERATIONS
	  int p = BITS_PER_WORD;
#else
	  int p = precop;
#endif
	  enum machine_mode m = smallest_mode_for_size (p, MODE_INT);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      if (prec0 <= precres && prec1 <= precres)
	{
	  tree types[2];
	  if (unsr_p)
	    {
	      types[0] = build_nonstandard_integer_type (precres, 0);
	      types[1] = type;
	    }
	  else
	    {
	      types[0] = type;
	      types[1] = build_nonstandard_integer_type (precres, 1);
	    }
	  arg0 = fold_convert_loc (loc, types[uns0_p], arg0);
	  arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
	  if (code != MULT_EXPR)
	    expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
				    uns0_p, uns1_p, false);
	  else
	    expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
				 uns0_p, uns1_p, false);
	  return;
	}

      /* Retry with a wider type.  */
      if (orig_precres == precres)
	{
	  int p = MAX (prec0, prec1);
	  enum machine_mode m = smallest_mode_for_size (p, MODE_INT);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      gcc_unreachable ();
    }
  while (1);
}

/* Expand ADD_OVERFLOW STMT.  */

static void
expand_ADD_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (PLUS_EXPR, stmt);
}

/* Expand SUB_OVERFLOW STMT.  */

static void
expand_SUB_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (MINUS_EXPR, stmt);
}

/* Expand MUL_OVERFLOW STMT.  */

static void
expand_MUL_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (MULT_EXPR, stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
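
/* Expand MASK_LOAD call STMT: load the lanes selected by the mask
   argument via the target's maskload optab.  */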
static void
expand_MASK_LOAD (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
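
/* Expand MASK_STORE call STMT: the masked counterpart of the above,
   storing only the lanes selected by the mask via the target's
   maskstore optab.  */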
static void
expand_MASK_STORE (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
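
/* ABNORMAL_DISPATCHER calls only mark the dispatcher blocks for
   abnormal edges; there is nothing to emit for them.  */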
static void
expand_ABNORMAL_DISPATCHER (gcall *)
{
}
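
/* Expand BUILTIN_EXPECT call STMT: by expansion time any branch
   probability hints have been consumed (see the assert below), so just
   evaluate the first argument into the lhs, if any.  */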
static void
expand_BUILTIN_EXPECT (gcall *stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gcall *stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gcall *stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}