/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "internal-fn.h"
#include "tree.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
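
/* As an illustration (the exact entries live in internal-fn.def): a
   definition roughly like
       DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF)
   expands to the string "LOAD_LANES" in internal_fn_name_array above and
   to ECF_CONST | ECF_LEAF in internal_fn_flags_array.  */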

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
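
/* Illustrative, target-specific example: on AArch64 a two-element array
   of V4SI vectors occupies 32 bytes and is given OImode, so a LOAD_LANES
   of that type queries vec_load_lanes_optab with (OImode, V4SImode),
   which maps to the ld2 instruction pattern.  */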

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
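
/* In GIMPLE dumps a LOAD_LANES call produced by the vectorizer looks
   roughly like (illustrative):
       vect_array = LOAD_LANES (MEM_REF);
   i.e. a single memory argument whose contents are distributed across
   the lanes of the destination vector array.  */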

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;
  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
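  /* Note: the branch above relies on the target providing an
     addv4/subv4-style pattern that performs the arithmetic and jumps to
     operand 3 on signed overflow (on x86, for instance, an add followed
     by a jump-on-overflow).  When no such pattern exists we fall through
     to the generic comparison-based expansion below.  */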
  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments is always non-negative
         or always negative, we can do just one comparison and
         conditional jump instead of 2 at runtime, 3 present in the
         emitted code.  If one of the arguments is CONST_INT, all we
         need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
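
      /* POS_NEG encodes what is known about op1's sign: 1 means known
         non-negative, 2 known negative, 3 unknown (so both runtime checks
         below get emitted).  For example, for "a + 42" the constant ends
         up as op1, pos_neg stays 3, and the comparison of a non-negative
         constant against zero folds away at expand time.  */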
      if (CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (TREE_CODE (arg0) == SSA_NAME)
        {
          double_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!arg0_min.is_negative ())
                pos_neg = 1;
              else if (arg0_max.is_negative ())
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          double_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!arg1_min.is_negative ())
                pos_neg = 1;
              else if (arg1_max.is_negative ())
                pos_neg = 2;
            }
        }

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
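      /* Worked example with 32-bit int: a = 7, b = -2 gives k = 5 <= 7,
         so no overflow; a = INT_MIN, b = -1 wraps to k = INT_MAX > a, so
         the check below fails and the overflow is reported.  */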
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
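      /* One comparison suffices because in two's complement the only
         operand whose negation overflows is the minimum value: e.g. for
         32-bit int, -(-2147483648) is not representable and wraps back
         to -2147483648.  */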
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = make_tree (TREE_TYPE (arg0), op0);
      ops.op1 = make_tree (TREE_TYPE (arg0), op1);
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
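          /* Worked example with an 8-bit mode and 16-bit wmode:
             100 * 3 = 300 = 0x012c.  The low half is 0x2c (44, sign bit
             clear) but the high half is 0x01 rather than 0x00, so the EQ
             test fails and the multiplication overflowed.  */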
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;
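          /* E.g. for 32-bit mode with 16-bit hmode: values in
             [-32768, 32767] are "small" (they sign-extend from the low
             half), while values in [-65536, 65535] have an all-zeros or
             all-ones upper half and are "medium".  */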
          if (TREE_CODE (arg0) == SSA_NAME)
            {
              double_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  if (arg0_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg0_min))
                    op0_small_p = true;
                  else if (arg0_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg0_min))
                    op0_medium_p = true;
                  if (!arg0_min.is_negative ())
                    op0_sign = 0;
                  else if (arg0_max.is_negative ())
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              double_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  if (arg1_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg1_min))
                    op1_small_p = true;
                  else if (arg1_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg1_min))
                    op1_medium_p = true;
                  if (!arg1_min.is_negative ())
                    op1_sign = 0;
                  else if (arg1_max.is_negative ())
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }
          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the high part
             of larger and lopart0 and lopart1 are the low parts of both
             operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);
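
          /* Sketch of the identity in use (with k = hprec and the halves
             read as unsigned): smaller * larger decomposes roughly as
               (lopart * hipart) << k  +  lopart0 * lopart1
             plus correction terms when lopart or hipart is actually
             negative; the conditional subtractions below supply exactly
             those corrections.  */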
          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);
          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }
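          /* The unsigned trick above: hipart + 1 maps -1 to 0 and 0 to 1,
             so the GTU comparison against const1_rtx is true exactly when
             hipart lies outside [-1, 0]; e.g. a hipart of 2 becomes 3,
             and 3 >u 1 jumps straight to do_error.  */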

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative, if they are
             different, overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }
);
765 /* Expand the ubsan builtin call. */
767 fn
= ubsan_build_overflow_builtin (MULT_EXPR
, gimple_location (stmt
),
768 TREE_TYPE (arg0
), arg0
, arg1
);
771 do_pending_stack_adjust ();
774 emit_label (done_label
);
777 emit_move_insn (target
, res
);

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
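
/* These UBSAN_CHECK_* internal calls are emitted when compiling with
   -fsanitize=signed-integer-overflow; each one expands to the checked
   arithmetic above, with a call into the ubsan runtime on the overflow
   path.  */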

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
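
/* A MASK_LOAD call is created for conditional loads in vectorizable
   loops; in GIMPLE dumps it looks roughly like (illustrative):
       _5 = MASK_LOAD (addr_2, 0B, mask_3);
   where the first argument is the address, the second a constant
   carrying the alias pointer type, and the third the vector mask.  It
   expands to the target's maskload pattern above.  */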

static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call. */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}