/* Internal functions.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
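
/* For illustration (a sketch, not an actual excerpt of internal-fn.def):
   an entry along the lines of

     DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL)

   expands to the string "LOAD_LANES" in internal_fn_name_array, to
   ECF_CONST | ECF_LEAF in internal_fn_flags_array, and, if the fnspec
   argument had been non-null, to a STRING_CST in
   internal_fn_fnspec_array above.  */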

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
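
/* Note: the load/store-lanes optabs describe instructions that load or
   store N vectors from memory while (de-)interleaving the elements, e.g.
   (on targets that provide them) instructions in the style of ARM NEON's
   vld2/vld3/vld4 and vst2/vst3/vst4.  The array-of-vectors type used here
   is the vectorizer's way of describing the N consecutive vectors.  */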

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

static void
expand_ANNOTATE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_VPTR (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gcall *)
{
  gcc_unreachable ();
}

/* This should get expanded in the tsan pass.  */

static void
expand_TSAN_FUNC_EXIT (gcall *)
{
  gcc_unreachable ();
}

/* Helper function for expand_addsub_overflow.  Return 1
   if ARG interpreted as signed in its precision is known to be always
   non-negative, 2 if ARG is known to be always negative, or 3 if ARG
   may be positive or negative.  */

static int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      wide_int w = wi::sext (arg, prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }
  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
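
/* For example (assuming 32-bit int): an SSA name with computed value range
   [0, 100] yields 1, [-5, -1] yields 2, and [-1, 1] or an unknown range
   yields 3.  An unsigned value whose range stays below 0x80000000 likewise
   yields 1, because the test is on the sign bit of the value reinterpreted
   as signed.  */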

/* Return minimum precision needed to represent all values
   of ARG in SIGNed integral type.  */

static int
get_min_precision (tree arg, signop sign)
{
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  signop orig_sign = sign;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      int p;
      if (TYPE_SIGN (TREE_TYPE (arg)) != sign)
	{
	  widest_int w = wi::to_widest (arg);
	  w = wi::ext (w, prec, sign);
	  p = wi::min_precision (w, sign);
	}
      else
	p = wi::min_precision (arg, sign);
      return MIN (p, prec);
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
	    sign = UNSIGNED;
	  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
	    return prec + (orig_sign != sign);
	  prec = TYPE_PRECISION (TREE_TYPE (arg));
	}
      if (++cnt > 30)
	return prec + (orig_sign != sign);
    }
  if (TREE_CODE (arg) != SSA_NAME)
    return prec + (orig_sign != sign);
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      arg = t;
	      if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
		    sign = UNSIGNED;
		  else if (sign == UNSIGNED && get_range_pos_neg (arg) != 1)
		    return prec + (orig_sign != sign);
		  prec = TYPE_PRECISION (TREE_TYPE (arg));
		}
	      if (++cnt > 30)
		return prec + (orig_sign != sign);
	      continue;
	    }
	}
      return prec + (orig_sign != sign);
    }
  if (sign == TYPE_SIGN (TREE_TYPE (arg)))
    {
      int p1 = wi::min_precision (arg_min, sign);
      int p2 = wi::min_precision (arg_max, sign);
      p1 = MAX (p1, p2);
      prec = MIN (prec, p1);
    }
  else if (sign == UNSIGNED && !wi::neg_p (arg_min, SIGNED))
    {
      int p = wi::min_precision (arg_max, UNSIGNED);
      prec = MIN (prec, p);
    }
  return prec + (orig_sign != sign);
}
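
/* For example: a value of 32-bit type with known range [0, 1000] needs
   only 10 bits when SIGN is UNSIGNED and 11 bits when SIGN is SIGNED.
   The "+ (orig_sign != sign)" accounts for the extra sign bit needed when
   an unsigned value has to be re-read in a signed context (or vice
   versa).  */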

/* Helper for expand_*_overflow.  Store RES into the __real__ part
   of TARGET.  If RES has larger MODE than __real__ part of TARGET,
   set the __imag__ part to 1 if RES doesn't fit into it.  */

static void
expand_arith_overflow_result_store (tree lhs, rtx target,
				    machine_mode mode, rtx res)
{
  machine_mode tgtmode = GET_MODE_INNER (GET_MODE (target));
  rtx lres = res;
  if (tgtmode != mode)
    {
      rtx_code_label *done_label = gen_label_rtx ();
      int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
      lres = convert_modes (tgtmode, mode, res, uns);
      gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
      do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
			       EQ, true, mode, NULL_RTX, NULL, done_label,
			       PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      emit_label (done_label);
    }
  write_complex_part (target, lres, false);
}

/* Helper for expand_*_overflow.  Store RES into TARGET.  */

static void
expand_ubsan_result_store (rtx target, rtx res)
{
  if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    convert_move (SUBREG_REG (target), res, SUBREG_PROMOTED_SIGN (target));
  else
    emit_move_insn (target, res);
}

/* Add add/sub overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */

static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
			tree arg0, tree arg1, bool unsr_p, bool uns0_p,
			bool uns1_p, bool is_ubsan)
{
  rtx res, target = NULL_RTX;
  tree fn;
  rtx_code_label *done_label = gen_label_rtx ();
  rtx_code_label *do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  int prec = GET_MODE_PRECISION (mode);
  rtx sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
  bool do_xor = false;

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.

     s1 + s2 -> sr
	res = (S) ((U) s1 + (U) s2)
	ovf = s2 < 0 ? res > s1 : res < s1 (or jump on overflow)
     s1 - s2 -> sr
	res = (S) ((U) s1 - (U) s2)
	ovf = s2 < 0 ? res < s1 : res > s1 (or jump on overflow)
     u1 + u2 -> ur
	res = u1 + u2
	ovf = res < u1 (or jump on carry, but RTL opts will handle it)
     u1 - u2 -> ur
	res = u1 - u2
	ovf = res > u1 (or jump on carry, but RTL opts will handle it)
     s1 + u2 -> sr
	res = (S) ((U) s1 + u2)
	ovf = ((U) res ^ sgn) < u2
     s1 + u2 -> ur
	t1 = (S) (u2 ^ sgn)
	t2 = s1 + t1
	res = (U) t2 ^ sgn
	ovf = t1 < 0 ? t2 > s1 : t2 < s1 (or jump on overflow)
     s1 - u2 -> sr
	res = (S) ((U) s1 - u2)
	ovf = u2 > ((U) s1 ^ sgn)
     s1 - u2 -> ur
	res = (U) s1 - u2
	ovf = s1 < 0 || u2 > (U) s1
     u1 - s2 -> sr
	res = u1 - (U) s2
	ovf = u1 >= ((U) s2 ^ sgn)
     u1 - s2 -> ur
	t1 = u1 ^ sgn
	t2 = t1 - (U) s2
	res = t2 ^ sgn
	ovf = s2 < 0 ? (S) t2 < (S) t1 : (S) t2 > (S) t1 (or jump on overflow)
     s1 + s2 -> ur
	res = (U) s1 + (U) s2
	ovf = s2 < 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)
     u1 + u2 -> sr
	res = (S) (u1 + u2)
	ovf = (U) res < u2 || res < 0
     u1 - u2 -> sr
	res = (S) (u1 - u2)
	ovf = u1 >= u2 ? res < 0 : res >= 0
     s1 - s2 -> ur
	res = (U) s1 - (U) s2
	ovf = s2 >= 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)  */
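
  /* A concrete instance of the first rule, assuming 8-bit operands:
     s1 = 100, s2 = 29.  res = (S) ((U) 100 + (U) 29) = (S) 129 = -127.
     Since s2 >= 0 and res < s1, the addition overflowed.  With s2 = 27
     instead, res = 127 >= s1 and there is no overflow.  */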

  if (code == PLUS_EXPR && uns0_p && !uns1_p)
    {
      /* PLUS_EXPR is commutative, if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      rtx tem = op1;
      op1 = op0;
      op0 = tem;
      tree t = arg1;
      arg1 = arg0;
      arg0 = t;
      uns0_p = 0;
      uns1_p = 1;
    }

  /* u1 +- u2 -> ur */
  if (uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = op0;
      /* For PLUS_EXPR, the operation is commutative, so we can pick
	 operand to compare against.  For prec <= BITS_PER_WORD, I think
	 preferring REG operand is better over CONST_INT, because
	 the CONST_INT might enlarge the instruction or CSE would need
	 to figure out we'd already loaded it into a register before.
	 For prec > BITS_PER_WORD, I think CONST_INT might be more beneficial,
	 as then the multi-word comparison can be perhaps simplified.  */
      if (code == PLUS_EXPR
	  && (prec <= BITS_PER_WORD
	      ? (CONST_SCALAR_INT_P (op0) && REG_P (op1))
	      : CONST_SCALAR_INT_P (op1)))
	tem = op1;
      do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
			       true, mode, NULL_RTX, NULL, done_label,
			       PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 +- u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab,
			      code == PLUS_EXPR ? res : op0, sgn,
			      NULL_RTX, false, OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 + u2 -> ur */
  if (code == PLUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      op1 = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op1, we have to avoid using the value range
	 for the original argument.  */
      arg1 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }
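
  /* The trick above (a sketch of the reasoning): adding sgn = 0x80...0 to
     u2 reinterprets it as the signed value u2 - 2^(prec-1), i.e. the
     t1 = (S) (u2 ^ sgn) of the table.  The signed path then computes
     t2 = s1 + t1 with signed overflow checking, and the do_xor fixup at
     the end adds sgn back, yielding res = (U) t2 ^ sgn, which equals
     s1 + u2 modulo 2^prec.  */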

  /* u1 - s2 -> ur */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && unsr_p)
    {
      op0 = expand_binop (mode, add_optab, op0, sgn, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      /* As we've changed op0, we have to avoid using the value range
	 for the original argument.  */
      arg0 = error_mark_node;
      do_xor = true;
      goto do_signed;
    }

  /* s1 - u2 -> ur */
  if (code == MINUS_EXPR && !uns0_p && uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the subtraction is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg0);
      if (pos_neg == 2)
	/* If ARG0 is known to be always negative, this is always overflow.  */
	emit_jump (do_error);
      else if (pos_neg == 3)
	/* If ARG0 is not known to be always positive, check at runtime.  */
	do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL, do_error, PROB_VERY_UNLIKELY);
      do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* u1 - s2 -> sr */
  if (code == MINUS_EXPR && uns0_p && !uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the subtraction is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* u1 + u2 -> sr */
  if (code == PLUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL, do_error, PROB_VERY_UNLIKELY);
      rtx tem = op1;
      /* The operation is commutative, so we can pick operand to compare
	 against.  For prec <= BITS_PER_WORD, I think preferring REG operand
	 is better over CONST_INT, because the CONST_INT might enlarge the
	 instruction or CSE would need to figure out we'd already loaded it
	 into a register before.  For prec > BITS_PER_WORD, I think CONST_INT
	 might be more beneficial, as then the multi-word comparison can be
	 perhaps simplified.  */
      if (prec <= BITS_PER_WORD
	  ? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
	  : CONST_SCALAR_INT_P (op0))
	tem = op0;
      do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL,
			       done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  /* s1 +- s2 -> ur */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      /* Compute the operation.  On RTL level, the operation is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      int pos_neg = get_range_pos_neg (arg1);
      if (code == PLUS_EXPR)
	{
	  int pos_neg0 = get_range_pos_neg (arg0);
	  if (pos_neg0 != 3 && pos_neg == 3)
	    {
	      rtx tem = op1;
	      op1 = op0;
	      op0 = tem;
	      pos_neg = pos_neg0;
	    }
	}
      rtx tem;
      if (pos_neg != 3)
	{
	  tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
				    ? and_optab : ior_optab,
			      op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL,
				   NULL, done_label, PROB_VERY_LIKELY);
	}
      else
	{
	  rtx_code_label *do_ior_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op1, const0_rtx,
				   code == MINUS_EXPR ? GE : LT, false, mode,
				   NULL_RTX, NULL, do_ior_label,
				   PROB_EVEN);
	  tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, done_label, PROB_VERY_LIKELY);
	  emit_jump (do_error);
	  emit_label (do_ior_label);
	  tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, done_label, PROB_VERY_LIKELY);
	}
      goto do_error_label;
    }

  /* u1 - u2 -> sr */
  if (code == MINUS_EXPR && uns0_p && uns1_p && !unsr_p)
    {
      /* Compute the operation.  On RTL level, the subtraction is always
	 unsigned.  */
      res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
			  OPTAB_LIB_WIDEN);
      rtx_code_label *op0_geu_op1 = gen_label_rtx ();
      do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL,
			       op0_geu_op1, PROB_EVEN);
      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
			       NULL, done_label, PROB_VERY_LIKELY);
      emit_jump (do_error);
      emit_label (op0_geu_op1);
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL, done_label, PROB_VERY_LIKELY);
      goto do_error_label;
    }

  gcc_assert (!uns0_p && !uns1_p && !unsr_p);

  /* s1 +- s2 -> sr */
 do_signed: ;
  enum insn_code icode;
  icode = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the operation is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump at runtime instead of two (three are present
	 in the emitted code).  If one of the arguments is CONST_INT, all we
	 need is to make sure it is op1, then the first
	 do_compare_rtx_and_jump will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  pos_neg = get_range_pos_neg (arg0);
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	pos_neg = get_range_pos_neg (arg1);

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	do_compare_rtx_and_jump (op1, const0_rtx, LT, false, mode, NULL_RTX,
				 NULL, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? GE : LE,
				 false, mode, NULL_RTX, NULL, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	do_compare_rtx_and_jump (res, op0, code == PLUS_EXPR ? LE : GE,
				 false, mode, NULL_RTX, NULL, done_label,
				 PROB_VERY_LIKELY);
    }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
					 arg0, arg1);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	{
	  if (do_xor)
	    res = expand_binop (mode, add_optab, res, sgn, NULL_RTX, false,
				OPTAB_LIB_WIDEN);

	  expand_arith_overflow_result_store (lhs, target, mode, res);
	}
    }
}

/* Add negate overflow checking to the statement STMT.  */

static void
expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
{
  rtx res, op1;
  tree fn;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
					 arg1, NULL_TREE);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}
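
/* For instance, with 32-bit int the only input for which negation
   overflows is INT_MIN (0x80000000): -INT_MIN is not representable, and
   the wrapped result equals the operand itself.  Hence the fallback above
   needs just the single comparison against TYPE_MIN_VALUE.  */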

/* Add mul overflow checking to the statement STMT.  */

static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
		     bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan)
{
  rtx res, op0, op1;
  tree fn, type;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;
  signop sign;
  enum insn_code icode;

  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  bool uns = unsr_p;
  if (lhs)
    {
      target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (!is_ubsan)
	write_complex_part (target, const0_rtx, true);
    }

  if (is_ubsan)
    gcc_assert (!unsr_p && !uns0_p && !uns1_p);

  /* We assume both operands and result have the same precision
     here (GET_MODE_BITSIZE (mode)), S stands for signed type
     with that precision, U for unsigned type with that precision,
     sgn for unsigned most significant bit in that precision.
     s1 is signed first operand, u1 is unsigned first operand,
     s2 is signed second operand, u2 is unsigned second operand,
     sr is signed result, ur is unsigned result and the following
     rules say how to compute result (which is always result of
     the operands as if both were unsigned, cast to the right
     signedness) and how to compute whether operation overflowed.
     main_ovf (false) stands for jump on signed multiplication
     overflow or the main algorithm with uns == false.
     main_ovf (true) stands for jump on unsigned multiplication
     overflow or the main algorithm with uns == true.

     s1 * s2 -> sr
	res = (S) ((U) s1 * (U) s2)
	ovf = main_ovf (false)
     u1 * u2 -> ur
	res = u1 * u2
	ovf = main_ovf (true)
     s1 * u2 -> ur
	res = (U) s1 * u2
	ovf = (s1 < 0 && u2) || main_ovf (true)
     u1 * u2 -> sr
	res = (S) (u1 * u2)
	ovf = res < 0 || main_ovf (true)
     s1 * u2 -> sr
	res = (S) ((U) s1 * u2)
	ovf = (S) u2 >= 0 ? main_ovf (false)
			  : (s1 != 0 && (s1 != -1 || u2 != (U) res))
     s1 * s2 -> ur
	t1 = (s1 & s2) < 0 ? (-(U) s1) : ((U) s1)
	t2 = (s1 & s2) < 0 ? (-(U) s2) : ((U) s2)
	res = t1 * t2
	ovf = (s1 ^ s2) < 0 ? (s1 && s2) : main_ovf (true)  */
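
  /* As a concrete instance of the u1 * u2 -> sr rule, assuming 8-bit
     operands: for u1 = 16, u2 = 16, res = (S) 0 (the low 8 bits of 256)
     and main_ovf (true) fires because the unsigned product does not fit;
     for u1 = 11, u2 = 12, res = (S) 132 = -124 < 0, so the result does
     not fit the signed type even though the unsigned multiplication
     itself did not overflow.  */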

  if (uns0_p && !uns1_p)
    {
      /* Multiplication is commutative, if operand signedness differs,
	 canonicalize to the first operand being signed and second
	 unsigned to simplify following code.  */
      rtx tem = op1;
      op1 = op0;
      op0 = tem;
      tree t = arg1;
      arg1 = arg0;
      arg0 = t;
      uns0_p = 0;
      uns1_p = 1;
    }

  int pos_neg0 = get_range_pos_neg (arg0);
  int pos_neg1 = get_range_pos_neg (arg1);

  /* s1 * u2 -> ur */
  if (!uns0_p && uns1_p && unsr_p)
    {
      switch (pos_neg0)
	{
	case 1:
	  /* If s1 is non-negative, just perform normal u1 * u2 -> ur.  */
	  goto do_main;
	case 2:
	  /* If s1 is negative, avoid the main code, just multiply and
	     signal overflow if op1 is not 0.  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, done_label, PROB_VERY_LIKELY);
	  goto do_error_label;
	case 3:
	  rtx_code_label *do_main_label;
	  do_main_label = gen_label_rtx ();
	  do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, do_main_label, PROB_VERY_LIKELY);
	  do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, do_main_label, PROB_VERY_LIKELY);
	  write_complex_part (target, const1_rtx, true);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* u1 * u2 -> sr */
  if (uns0_p && uns1_p && !unsr_p)
    {
      uns = true;
      /* Rest of handling of this case after res is computed.  */
      goto do_main;
    }

  /* s1 * u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p)
    {
      switch (pos_neg1)
	{
	case 1:
	  goto do_main;
	case 2:
	  /* If (S) u2 is negative (i.e. u2 is larger than maximum of S),
	     avoid the main code, just multiply and signal overflow
	     unless 0 * u2 or -1 * ((U) Smin).  */
	  struct separate_ops ops;
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg1);
	  ops.op0 = make_tree (ops.type, op0);
	  ops.op1 = make_tree (ops.type, op1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, done_label, PROB_VERY_LIKELY);
	  do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
				   NULL, do_error, PROB_VERY_UNLIKELY);
	  int prec;
	  prec = GET_MODE_PRECISION (mode);
	  rtx sgn;
	  sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
	  do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
				   NULL, done_label, PROB_VERY_LIKELY);
	  goto do_error_label;
	case 3:
	  /* Rest of handling of this case after res is computed.  */
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

  /* s1 * s2 -> ur */
  if (!uns0_p && !uns1_p && unsr_p)
    {
      rtx tem, tem2;
      switch (pos_neg0 | pos_neg1)
	{
	case 1: /* Both operands known to be non-negative.  */
	  goto do_main;
	case 2: /* Both operands known to be negative.  */
	  op0 = expand_unop (mode, neg_optab, op0, NULL_RTX, false);
	  op1 = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
	  /* Avoid looking at arg0/arg1 ranges, as we've changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  goto do_main;
	case 3:
	  if ((pos_neg0 ^ pos_neg1) == 3)
	    {
	      /* If one operand is known to be negative and the other
		 non-negative, this overflows always, unless the non-negative
		 one is 0.  Just do normal multiply and set overflow
		 unless one of the operands is 0.  */
	      struct separate_ops ops;
	      ops.code = MULT_EXPR;
	      ops.type
		= build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
						  1);
	      ops.op0 = make_tree (ops.type, op0);
	      ops.op1 = make_tree (ops.type, op1);
	      ops.op2 = NULL_TREE;
	      ops.location = loc;
	      res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	      tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
				  OPTAB_LIB_WIDEN);
	      do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode,
				       NULL_RTX, NULL, done_label,
				       PROB_VERY_LIKELY);
	      goto do_error_label;
	    }
	  /* The general case, do all the needed comparisons at runtime.  */
	  rtx_code_label *do_main_label, *after_negate_label;
	  rtx rop0, rop1;
	  rop0 = gen_reg_rtx (mode);
	  rop1 = gen_reg_rtx (mode);
	  emit_move_insn (rop0, op0);
	  emit_move_insn (rop1, op1);
	  op0 = rop0;
	  op1 = rop1;
	  do_main_label = gen_label_rtx ();
	  after_negate_label = gen_label_rtx ();
	  tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
			      OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, after_negate_label, PROB_VERY_LIKELY);
	  /* Both arguments negative here, negate them and continue with
	     normal unsigned overflow checking multiplication.  */
	  emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
					    NULL_RTX, false));
	  emit_move_insn (op1, expand_unop (mode, neg_optab, op1,
					    NULL_RTX, false));
	  /* Avoid looking at arg0/arg1 ranges, as we might have changed
	     the arguments.  */
	  arg0 = error_mark_node;
	  arg1 = error_mark_node;
	  emit_jump (do_main_label);
	  emit_label (after_negate_label);
	  tem2 = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
			       OPTAB_LIB_WIDEN);
	  do_compare_rtx_and_jump (tem2, const0_rtx, GE, false, mode, NULL_RTX,
				   NULL, do_main_label, PROB_VERY_LIKELY);
	  /* One argument is negative here, the other positive.  This
	     overflows always, unless one of the arguments is 0.  But
	     if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
	     is, thus we can keep do_main code oring in overflow as is.  */
	  do_compare_rtx_and_jump (tem, const0_rtx, EQ, true, mode, NULL_RTX,
				   NULL, do_main_label, PROB_VERY_LIKELY);
	  write_complex_part (target, const1_rtx, true);
	  emit_label (do_main_label);
	  goto do_main;
	default:
	  gcc_unreachable ();
	}
    }

 do_main:
  type = build_nonstandard_integer_type (GET_MODE_PRECISION (mode), uns);
  sign = uns ? UNSIGNED : SIGNED;
  icode = optab_handler (uns ? umulv4_optab : mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      int prec = GET_MODE_PRECISION (mode);
      machine_mode hmode = mode_for_size (prec / 2, MODE_INT, 1);
      ops.op0 = make_tree (type, op0);
      ops.op1 = make_tree (type, op1);
      ops.op2 = NULL_TREE;
      ops.location = loc;
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), uns);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res, prec,
				     NULL_RTX, uns);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  if (uns)
	    /* For the unsigned multiplication, there was overflow if
	       HIPART is non-zero.  */
	    do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
				     NULL_RTX, NULL, done_label,
				     PROB_VERY_LIKELY);
	  else
	    {
	      rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
					  NULL_RTX, 0);
	      /* RES is low half of the double width result, HIPART
		 the high half.  There was overflow if
		 HIPART is different from RES < 0 ? -1 : 0.  */
	      do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
				       NULL_RTX, NULL, done_label,
				       PROB_VERY_LIKELY);
	    }
	}
      else if (hmode != BLKmode && 2 * GET_MODE_PRECISION (hmode) == prec)
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = uns ? NULL : gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = uns ? NULL : gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, uns);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = const0_rtx;
	  if (!uns)
	    signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				     NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, uns);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = const0_rtx;
	  if (!uns)
	    signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				     NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (pos_neg0 == 1)
	    op0_sign = 0;
	  else if (pos_neg0 == 2)
	    op0_sign = -1;
	  if (pos_neg1 == 1)
	    op1_sign = 0;
	  else if (pos_neg1 == 2)
	    op1_sign = -1;

	  unsigned int mprec0 = prec;
	  if (arg0 != error_mark_node)
	    mprec0 = get_min_precision (arg0, sign);
	  if (mprec0 <= hprec)
	    op0_small_p = true;
	  else if (!uns && mprec0 <= hprec + 1)
	    op0_medium_p = true;
	  unsigned int mprec1 = prec;
	  if (arg1 != error_mark_node)
	    mprec1 = get_min_precision (arg1, sign);
	  if (mprec1 <= hprec)
	    op1_small_p = true;
	  else if (!uns && mprec1 <= hprec + 1)
	    op1_medium_p = true;

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
				     NULL_RTX, NULL, large_op0,
				     PROB_UNLIKELY);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign (!uns) or zero (uns) extended from
	     hmode to mode, the multiplication will never overflow.  We can
	     do just one hmode x hmode => mode widening multiplication.  */
	  rtx lopart0s = lopart0, lopart1s = lopart1;
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      lopart0s = shallow_copy_rtx (lopart0);
	      SUBREG_PROMOTED_VAR_P (lopart0s) = 1;
	      SUBREG_PROMOTED_SET (lopart0s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      lopart1s = shallow_copy_rtx (lopart1);
	      SUBREG_PROMOTED_VAR_P (lopart1s) = 1;
	      SUBREG_PROMOTED_SET (lopart1s, uns ? SRP_UNSIGNED : SRP_SIGNED);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, uns);
	  ops.op0 = make_tree (halfstype, lopart0s);
	  ops.op1 = make_tree (halfstype, lopart1s);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = type;
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op1 is not, just swap the arguments and handle it as op1
	     sign/zero extended, op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
				     NULL_RTX, NULL, both_ops_large,
				     PROB_UNLIKELY);

	  /* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
	     but op0 is not, prepare larger, hipart and lopart pseudos and
	     handle it together with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
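
	  /* A sketch of the arithmetic, with h = hprec: the small operand
	     equals (S) lopart, and larger = hipart * 2^h + lo (larger) with
	     lo taken unsigned, so the full product is

	       lopart * larger = (lopart * hipart) << h + lopart0 * lopart1.

	     loxhi below accumulates the high half (lopart * hipart plus the
	     carry out of the unsigned low product), with corrections applied
	     when hipart or lopart have to be re-read as signed values.  */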

	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  if (!uns)
	    {
	      /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	      if (larger_sign == 0)
		emit_jump (after_hipart_neg);
	      else if (larger_sign != -1)
		do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL, after_hipart_neg,
					 PROB_EVEN);

	      tem = convert_modes (mode, hmode, lopart, 1);
	      tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	      tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_hipart_neg);

	      /* if (lopart < 0) loxhi -= larger;  */
	      if (smaller_sign == 0)
		emit_jump (after_lopart_neg);
	      else if (smaller_sign != -1)
		do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
					 NULL_RTX, NULL, after_lopart_neg,
					 PROB_EVEN);

	      tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
					 1, OPTAB_DIRECT);
	      emit_move_insn (loxhi, tem);

	      emit_label (after_lopart_neg);
	    }

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1))  (if !uns)
	     if (loxhi >> (bitsize / 2) == 0  (if uns).  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx signbitloxhi = const0_rtx;
	  if (!uns)
	    signbitloxhi = expand_shift (RSHIFT_EXPR, hmode,
					 gen_lowpart (hmode, loxhi),
					 hprec - 1, NULL_RTX, 0);

	  do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
				   NULL_RTX, NULL, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign (!uns) or zero (uns)
	     extended from hmode), then perform the full multiplication
	     which will be the result of the operation.
	     The only cases which don't overflow are for signed multiplication
	     some cases where both hipart0 and hipart1 are 0 or -1.
	     For unsigned multiplication when high parts are both non-zero
	     this overflows always.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!uns)
	    {
	      if (!op0_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					     NULL_RTX, 1, OPTAB_DIRECT);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL, do_error,
					   PROB_VERY_UNLIKELY);
		}

	      if (!op1_medium_p)
		{
		  tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					     NULL_RTX, 1, OPTAB_DIRECT);
		  do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
					   NULL_RTX, NULL, do_error,
					   PROB_VERY_UNLIKELY);
		}

	      /* At this point hipart{0,1} are both in [-1, 0].  If they are
		 the same, overflow happened if res is negative, if they are
		 different, overflow happened if res is positive.  */
	      if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
		emit_jump (hipart_different);
	      else if (op0_sign == 1 || op1_sign == 1)
		do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
					 NULL_RTX, NULL, hipart_different,
					 PROB_EVEN);

	      do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode,
				       NULL_RTX, NULL, do_error,
				       PROB_VERY_UNLIKELY);
	      emit_jump (done_label);

	      emit_label (hipart_different);

	      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
				       NULL_RTX, NULL, do_error,
				       PROB_VERY_UNLIKELY);
	      emit_jump (done_label);
	    }

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (type, op0);
	  ops.op1 = make_tree (type, op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  gcc_assert (!is_ubsan);
	  ops.code = MULT_EXPR;
	  ops.type = type;
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

 do_error_label:
  emit_label (do_error);
  if (is_ubsan)
    {
      /* Expand the ubsan builtin call.  */
      push_temp_slots ();
      fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
					 arg0, arg1);
      expand_normal (fn);
      pop_temp_slots ();
      do_pending_stack_adjust ();
    }
  else if (lhs)
    write_complex_part (target, const1_rtx, true);

  /* We're done.  */
  emit_label (done_label);

  /* u1 * u2 -> sr */
  if (uns0_p && uns1_p && !unsr_p)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL, all_done_label, PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      emit_label (all_done_label);
    }

  /* s1 * u2 -> sr */
  if (!uns0_p && uns1_p && !unsr_p && pos_neg1 == 3)
    {
      rtx_code_label *all_done_label = gen_label_rtx ();
      rtx_code_label *set_noovf = gen_label_rtx ();
      do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
			       NULL, all_done_label, PROB_VERY_LIKELY);
      write_complex_part (target, const1_rtx, true);
      do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
			       NULL, set_noovf, PROB_VERY_LIKELY);
      do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
			       NULL, all_done_label, PROB_VERY_UNLIKELY);
      do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL,
			       all_done_label, PROB_VERY_UNLIKELY);
      emit_label (set_noovf);
      write_complex_part (target, const0_rtx, true);
      emit_label (all_done_label);
    }

  if (lhs)
    {
      if (is_ubsan)
	expand_ubsan_result_store (target, res);
      else
	expand_arith_overflow_result_store (lhs, target, mode, res);
    }
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
			  false, false, false, true);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  if (integer_zerop (arg0))
    expand_neg_overflow (loc, lhs, arg1, true);
  else
    expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
			    false, false, false, true);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gcall *stmt)
{
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_call_lhs (stmt);
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true);
}

/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion.  */

static void
expand_arith_overflow (enum tree_code code, gimple stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  int uns0_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  int uns1_p = TYPE_UNSIGNED (TREE_TYPE (arg1));
  int unsr_p = TYPE_UNSIGNED (type);
  int prec0 = TYPE_PRECISION (TREE_TYPE (arg0));
  int prec1 = TYPE_PRECISION (TREE_TYPE (arg1));
  int precres = TYPE_PRECISION (type);
  location_t loc = gimple_location (stmt);
  if (!uns0_p && get_range_pos_neg (arg0) == 1)
    uns0_p = true;
  if (!uns1_p && get_range_pos_neg (arg1) == 1)
    uns1_p = true;
  int pr = get_min_precision (arg0, uns0_p ? UNSIGNED : SIGNED);
  prec0 = MIN (prec0, pr);
  pr = get_min_precision (arg1, uns1_p ? UNSIGNED : SIGNED);
  prec1 = MIN (prec1, pr);

  /* If uns0_p && uns1_p, precop is minimum needed precision
     of unsigned type to hold the exact result, otherwise
     precop is minimum needed precision of signed type to
     hold the exact result.  */
  int precop;
  if (code == MULT_EXPR)
    precop = prec0 + prec1 + (uns0_p != uns1_p);
  else
    {
      if (uns0_p == uns1_p)
	precop = MAX (prec0, prec1) + 1;
      else if (uns0_p)
	precop = MAX (prec0 + 1, prec1) + 1;
      else
	precop = MAX (prec0, prec1 + 1) + 1;
    }
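
  /* For example, for int + int -> int (prec0 = prec1 = precres = 32, all
     signed, with no narrowing range information) precop is 33, so the fast
     path below is not taken; for short + short -> int, precop is 17, which
     fits, so the result is computed directly and the overflow flag is
     simply set to 0.  */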

  int orig_precres = precres;

  do
    {
      if ((uns0_p && uns1_p)
	  ? ((precop + !unsr_p) <= precres
	     /* u1 - u2 -> ur can overflow, no matter what precision
		the result has.  */
	     && (code != MINUS_EXPR || !unsr_p))
	  : (!unsr_p && precop <= precres))
	{
	  /* The infinite precision result will always fit into the
	     result.  */
	  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  write_complex_part (target, const0_rtx, true);
	  enum machine_mode mode = TYPE_MODE (type);
	  struct separate_ops ops;
	  ops.code = code;
	  ops.type = type;
	  ops.op0 = fold_convert_loc (loc, type, arg0);
	  ops.op1 = fold_convert_loc (loc, type, arg1);
	  ops.op2 = NULL_TREE;
	  ops.location = loc;
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  expand_arith_overflow_result_store (lhs, target, mode, tem);
	  return;
	}

#ifdef WORD_REGISTER_OPERATIONS
      /* For sub-word operations, if target doesn't have them, start
	 with precres widening right away, otherwise do it only
	 if the most simple cases can't be used.  */
      if (orig_precres == precres && precres < BITS_PER_WORD)
	;
      else
#endif
      if ((uns0_p && uns1_p && unsr_p && prec0 <= precres && prec1 <= precres)
	  || ((!uns0_p || !uns1_p) && !unsr_p
	      && prec0 + uns0_p <= precres
	      && prec1 + uns1_p <= precres))
	{
	  arg0 = fold_convert_loc (loc, type, arg0);
	  arg1 = fold_convert_loc (loc, type, arg1);
	  switch (code)
	    {
	    case MINUS_EXPR:
	      if (integer_zerop (arg0) && !unsr_p)
		{
		  expand_neg_overflow (loc, lhs, arg1, false);
		  return;
		}
	      /* FALLTHRU */
	    case PLUS_EXPR:
	      expand_addsub_overflow (loc, code, lhs, arg0, arg1,
				      unsr_p, unsr_p, unsr_p, false);
	      return;
	    case MULT_EXPR:
	      expand_mul_overflow (loc, lhs, arg0, arg1,
				   unsr_p, unsr_p, unsr_p, false);
	      return;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* For sub-word operations, retry with a wider type first.  */
      if (orig_precres == precres && precop <= BITS_PER_WORD)
	{
#ifdef WORD_REGISTER_OPERATIONS
	  int p = BITS_PER_WORD;
#else
	  int p = precop;
#endif
	  enum machine_mode m = smallest_mode_for_size (p, MODE_INT);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      if (prec0 <= precres && prec1 <= precres)
	{
	  tree types[2];
	  if (unsr_p)
	    {
	      types[0] = build_nonstandard_integer_type (precres, 0);
	      types[1] = type;
	    }
	  else
	    {
	      types[0] = type;
	      types[1] = build_nonstandard_integer_type (precres, 1);
	    }
	  arg0 = fold_convert_loc (loc, types[uns0_p], arg0);
	  arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
	  if (code != MULT_EXPR)
	    expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
				    uns0_p, uns1_p, false);
	  else
	    expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
				 uns0_p, uns1_p, false);
	  return;
	}

      /* Retry with a wider type.  */
      if (orig_precres == precres)
	{
	  int p = MAX (prec0, prec1);
	  enum machine_mode m = smallest_mode_for_size (p, MODE_INT);
	  tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
							uns0_p && uns1_p
							&& unsr_p);
	  p = TYPE_PRECISION (optype);
	  if (p > precres)
	    {
	      precres = p;
	      unsr_p = TYPE_UNSIGNED (optype);
	      type = optype;
	      continue;
	    }
	}

      gcc_unreachable ();
    }
  while (1);
}
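
/* For reference, a sketch of the source-level view (assuming the GCC 5
   __builtin_*_overflow interface): a C call such as

     int r;
     _Bool ovf = __builtin_mul_overflow (a, b, &r);

   is represented in gimple roughly as

     _1 = .MUL_OVERFLOW (a, b);
     r = REALPART_EXPR <_1>;
     ovf = (_Bool) IMAGPART_EXPR <_1>;

   and that REALPART/IMAGPART pair is what the expanders above fill in
   via write_complex_part.  */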

/* Expand ADD_OVERFLOW STMT.  */

static void
expand_ADD_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (PLUS_EXPR, stmt);
}

/* Expand SUB_OVERFLOW STMT.  */

static void
expand_SUB_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (MINUS_EXPR, stmt);
}

/* Expand MUL_OVERFLOW STMT.  */

static void
expand_MUL_OVERFLOW (gcall *stmt)
{
  expand_arith_overflow (MULT_EXPR, stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gcall *)
{
  gcc_unreachable ();
}

/* Expand MASK_LOAD call STMT using the maskload optab.  */

static void
expand_MASK_LOAD (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

/* Expand MASK_STORE call STMT using the maskstore optab.  */

static void
expand_MASK_STORE (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

static void
expand_ABNORMAL_DISPATCHER (gcall *)
{
  /* Nothing to expand; the dispatcher only models abnormal control flow
     edges in the CFG.  */
}

static void
expand_BUILTIN_EXPECT (gcall *stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
			 EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* IFN_VA_ARG is supposed to be expanded at pass_stdarg.  So this dummy
   function should never be called.  */

static void
expand_VA_ARG (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gcall *stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gcall *stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}