/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "internal-fn.h"
#include "tree.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
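
/* Editor's note: internal-fn.def defines each function exactly once via
   DEF_INTERNAL_FN; redefining the macro before each include selects which
   column of the table is emitted.  For instance, a hypothetical entry
   DEF_INTERNAL_FN (FOO, ECF_CONST | ECF_LEAF, NULL) would contribute the
   string "FOO" here, keeping this array parallel to the internal_fn enum
   generated from the same file.  */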
/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
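
/* Editor's note: the fnspec strings follow the same convention as the
   "fn spec" function attribute, describing how each argument is used.
   Building them once as STRING_CSTs here is assumed to let later queries
   (the internal_fn_fnspec accessor in internal-fn.h) fetch them by
   function code instead of reparsing the .def file.  */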
/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
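
/* Illustrative example (editor's sketch): for a two-vector V4SI load,
   ARRAY_TYPE is a 2-element array of V4SI, so IMODE is the mode chosen
   for the whole array and VMODE is V4SImode; the handler lookup then
   finds the target's vec_load_lanes pattern for that mode pair, if the
   target defines one.  */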
/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
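
/* Editor's note: LOAD_LANES and STORE_LANES model structured
   (de)interleaving accesses such as ARM NEON vld2/vst2 and friends.
   The vectorizer generates these internal calls, and the two expanders
   above simply wire the GIMPLE operands to the target's
   vec_load_lanes/vec_store_lanes insn patterns.  */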
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump instead of 2 at runtime, 3 present in the
         emitted code.  If one of the arguments is CONST_INT, all we
         need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          double_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!arg0_min.is_negative ())
                pos_neg = 1;
              else if (arg0_max.is_negative ())
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          double_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!arg1_min.is_negative ())
                pos_neg = 1;
              else if (arg1_max.is_negative ())
                pos_neg = 2;
            }
        }

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);
      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
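
/* Editor's sketch (not from the original sources) of the generic
   sequence emitted above for res = a + b when no addv4 pattern exists
   and nothing is known about the sign of B:

       res = a + b;                     // unsigned RTL addition
       if (b < 0) goto sub_check;
       if (res >= a) goto done; else goto error;
     sub_check:
       if (res <= a) goto done; else goto error;

   When range information pins down the sign of one operand, pos_neg is
   1 or 2 and only the matching half of the check survives.  */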
/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
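
/* Editor's note: in two's complement the only value whose negation
   overflows is the type minimum, so the generic path above needs just
   one comparison: emit res = -a, then jump to done_label unless
   a == TYPE_MIN_VALUE, in which case execution falls through to the
   runtime diagnostic.  */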
/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          /* Multiply in a mode twice as wide and inspect the high half
             of the result.  */
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is the low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);
          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              double_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  if (arg0_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg0_min))
                    op0_small_p = true;
                  else if (arg0_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg0_min))
                    op0_medium_p = true;
                  if (!arg0_min.is_negative ())
                    op0_sign = 0;
                  else if (arg0_max.is_negative ())
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              double_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  if (arg1_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg1_min))
                    op1_small_p = true;
                  else if (arg1_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg1_min))
                    op1_medium_p = true;
                  if (!arg1_min.is_negative ())
                    op1_sign = 0;
                  else if (arg1_max.is_negative ())
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }
          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);
          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);
          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the high part
             of larger and lopart0 and lopart1 are the low parts of both
             operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);
          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);
          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* There was overflow if
             loxhi >> (bitsize / 2) != (hmode) loxhi >> (bitsize / 2 - 1).  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);
          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);
          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }
          /* At this point hipart{0,1} are both in [-1, 0].  If they are the
             same, overflow happened if res is negative, if they are different,
             overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
          emit_label (do_overflow);

          /* Overflow, do full multiplication and fall through into
             do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          /* No usable fallback: expand the plain multiplication without
             an overflow check.  */
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
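
/* Editor's sketch of the half-precision fallback above: writing each
   operand as hi * 2^hprec + lo (lo taken unsigned),

       a * b = (ahi * bhi) * 2^(2*hprec)
             + (ahi * blo + alo * bhi) * 2^hprec
             + alo * blo

   When at least one operand is sign extended from hmode, the product is
   assembled from two widening multiplications: lo0xlo1 holds the
   unsigned low product and loxhi accumulates lopart * hipart together
   with the sign corrections; overflow is then detected by checking that
   the high half of loxhi is a pure sign extension of its low half.  */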
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}
/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}
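
/* Editor's note: the 0 - x special case dispatches to the negation
   expander because comparing against TYPE_MIN_VALUE is cheaper than the
   general subtraction overflow sequence.  */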
/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
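
/* Editor's sketch of MASK_LOAD semantics, per vector lane I:

       lhs[i] = mask[i] ? ((vectype *) addr)[i] : <undefined>;

   Inactive lanes must not be accessed at all (the point is to allow
   vectorizing conditional loads that might otherwise fault), so the
   expansion has to use the target's maskload pattern rather than a
   plain vector load.  */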
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
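
/* Editor's note: MASK_STORE is the mirror image, per vector lane I:

       if (mask[i]) ((vectype *) addr)[i] = rhs[i];

   leaving memory in inactive lanes untouched, hence the maskstore
   pattern.  */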
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target,
                         VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
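
/* Editor's note: this table requires an expand_<NAME> routine for every
   code in internal-fn.def; a missing one surfaces as a compile error
   when the table is built rather than at run time, which is why even
   functions that must never survive to expansion get explicit
   gcc_unreachable stubs above.  */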
/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}