/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "predict.h"
#include "ubsan.h"
#include "target.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
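/* As an illustration, an entry in internal-fn.def such as
     DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL)
   contributes the string "LOAD_LANES" to the array above via the #CODE
   stringization, IFN_LOAD_LANES to the internal_fn enum, and its ECF_*
   flags to internal_fn_flags_array below.  */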
/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

/* Fill internal_fn_fnspec_array from the FNSPEC strings in
   internal-fn.def.  */

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
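/* A fnspec string gives one character for the return value followed by
   one per argument; for instance 'R' marks an argument that is only
   read.  See gimple_call_arg_flags for the authoritative decoding (the
   example letter here is illustrative).  */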
/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  machine_mode imode;
  machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
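/* Note: the vec_load_lanes/vec_store_lanes optabs are provided by targets
   with structure load/store instructions; e.g. on AArch64 they map to
   ld2/ld3/ld4 and st2/st3/st4.  (Illustrative; which targets implement
   these patterns is up to the backend.)  */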
/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump instead of 2 at runtime, 3 present in the
         emitted code.  If one of the arguments is CONST_INT, all we
         need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
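      /* For instance, with 8-bit operands and op1 known to be
         non-negative, overflow of op0 + op1 happened iff the wrapped
         result is smaller than op0: 100 + 100 wraps to -56, and
         -56 < 100 flags the overflow with a single comparison.  */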
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          /* Put the constant into op1, where the code below expects it.  */
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              /* The sign of arg0 is known; swap so that the known
                 operand is op1.  */
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
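      /* E.g. in 8 bits, (-100) + (-100) wraps to +56, and 56 > -100
         exposes the overflow.  */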
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
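      /* Negation overflows for exactly one input, the minimum value of
         the type, whose negation is not representable (e.g. in 8 bits,
         -(-128) would be +128); every other value jumps straight to
         done_label.  */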
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = make_tree (TREE_TYPE (arg0), op0);
      ops.op1 = make_tree (TREE_TYPE (arg0), op1);
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
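          /* E.g. for 32-bit operands multiplied in 64 bits,
             65536 * 65536 = 0x100000000, so RES (the low half) is 0
             and HIPART is 1, while RES < 0 ? -1 : 0 gives 0; since
             1 != 0, control falls through to do_error below.  */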
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx_code_label *large_op0 = gen_label_rtx ();
          rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
          rtx_code_label *one_small_one_large = gen_label_rtx ();
          rtx_code_label *both_ops_large = gen_label_rtx ();
          rtx_code_label *after_hipart_neg = gen_label_rtx ();
          rtx_code_label *after_lopart_neg = gen_label_rtx ();
          rtx_code_label *do_overflow = gen_label_rtx ();
          rtx_code_label *hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
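          /* For example, with 32-bit mode and hprec == 16, "small"
             means the value is in [-32768, 32767] and so sign-extends
             from the low half; "medium" means it is in [-65536, 65535],
             i.e. its signed min_precision is at most hprec + 1.  */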
          int op0_sign = 1, op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))
             there was no overflow in the high part.  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are the
             same, overflow happened if res is negative, if they are different,
             overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          /* No wider mode and no usable half-width mode: just compute
             the product; no overflow check is emitted in this case.  */
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
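/* MASK_LOAD and MASK_STORE calls are created by if-conversion
   (tree-if-conv.c) for loads and stores guarded by a vectorizable
   condition: the first two arguments form the address, and the last
   is the vector mask selecting the active lanes.  */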
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target,
                         VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
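/* So e.g. internal_fn_expanders[IFN_LOAD_LANES] is expand_LOAD_LANES;
   the .def file keeps the internal_fn enum and this table in sync.  */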
/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}