/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
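/* As an illustration: an internal-fn.def entry such as
     DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL)
   contributes the string "LOAD_LANES" to the name array above and its
   ECF_* flags to the flags array below, so the tables stay in sync with
   the IFN_* enumeration generated from the same list.  */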
/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
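/* An aside on the FNSPEC strings stored above: they follow the "fn spec"
   attribute convention used elsewhere in GCC, where the first character
   describes the return value and each following character one argument
   (for instance 'R' marks an argument that is only read from).  Most
   internal functions simply pass NULL here.  */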
/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
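/* For illustration: the vectorizer emits calls of the shape
     _1 = LOAD_LANES (MEM_REF);
   and the vec_load_lanes pattern maps the whole array move to a single
   structure load, e.g. ld3 on AArch64, which de-interleaves several
   vectors from memory in one instruction (the concrete target insn is
   only an example, not implied by this file).  */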
/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can emit just one comparison
         and conditional jump at runtime instead of 2 (3 are present in
         the emitted code).  If one of the arguments is a CONST_INT, all
         we need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
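      /* Concretely: when op1 is known to be non-negative, the wrapped
         addition res = op0 + op1 overflowed in the signed sense iff
         res < op0 as a signed comparison; e.g. in 8 bits, 100 + 100
         yields res = -56 < 100.  When op1 is known to be negative, the
         symmetric res > op0 test applies, which is what the checks
         below emit.  */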
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }
      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }
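  /* In two's complement only one input overflows under negation: the
     most negative value, e.g. -(-128) in 8 bits, whose positive
     counterpart is not representable.  Hence the single comparison
     against TYPE_MIN_VALUE above suffices.  */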
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is the low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
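          /* Worked example in 8-bit mode: 100 * 3 = 300 = 0x012c, so the
             low half RES is 0x2c (positive, SIGNBIT 0) while HIPART is 1;
             the mismatch correctly reports overflow, since 300 does not
             fit in a signed 8-bit value.  */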
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx_code_label *large_op0 = gen_label_rtx ();
          rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
          rtx_code_label *one_small_one_large = gen_label_rtx ();
          rtx_code_label *both_ops_large = gen_label_rtx ();
          rtx_code_label *after_hipart_neg = gen_label_rtx ();
          rtx_code_label *after_lopart_neg = gen_label_rtx ();
          rtx_code_label *do_overflow = gen_label_rtx ();
          rtx_code_label *hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halves.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1, op1_sign = 1;
          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }
          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }
          emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                   false, large_op0, PROB_UNLIKELY);

          emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                   false, small_op0_large_op1,
                                   PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);
          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                   false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);
          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the high
             part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
= build_nonstandard_integer_type (hprec
, 1);
672 ops
.op0
= make_tree (halfutype
, lopart0
);
673 ops
.op1
= make_tree (halfutype
, lopart1
);
675 = expand_expr_real_2 (&ops
, NULL_RTX
, mode
, EXPAND_NORMAL
);
677 ops
.op0
= make_tree (halfutype
, lopart
);
678 ops
.op1
= make_tree (halfutype
, hipart
);
679 rtx loxhi
= gen_reg_rtx (mode
);
680 rtx tem
= expand_expr_real_2 (&ops
, NULL_RTX
, mode
, EXPAND_NORMAL
);
681 emit_move_insn (loxhi
, tem
);
          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);
          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);
          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);
          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);
          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
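          /* For instance, with 8-bit halves and hipart0 == hipart1 == 0,
             both operands are in [0, 255]; their product fits in signed
             16 bits iff it is below 2^15, i.e. iff res has its sign bit
             clear, which matches the res < 0 test emitted below.  */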
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);
          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
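/* For reference, the vectorizer generates these calls in the shape
     lhs = MASK_LOAD (addr, align, mask);   resp.
     MASK_STORE (addr, align, mask, value);
   for loads and stores under a loop-condition mask; maskload_optab and
   maskstore_optab then map them to conditional memory patterns, e.g. the
   AVX/AVX2 maskmov family on x86 (the target insn is given only as an
   illustration).  */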
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}
static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target,
                         VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
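/* A call to, say, IFN_LOAD_LANES therefore dispatches to
   expand_LOAD_LANES above: this table and the internal_fn enumeration
   are both generated from internal-fn.def, so the index of each
   expander matches its function code by construction.  */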
/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}