/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

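/* Both tables rely on the "X macro" pattern: internal-fn.def holds one
   DEF_INTERNAL_FN (CODE, FLAGS) entry per function, and each inclusion
   site defines DEF_INTERNAL_FN to extract the column it needs.  As an
   illustration (hypothetical entries, not the actual contents of
   internal-fn.def), given

       DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF)
       DEF_INTERNAL_FN (STORE_LANES, ECF_CONST | ECF_LEAF)

   the two expansions above would yield

       const char *const internal_fn_name_array[]
	 = { "LOAD_LANES", "STORE_LANES", "<invalid-fn>" };
       const int internal_fn_flags_array[]
	 = { ECF_CONST | ECF_LEAF, ECF_CONST | ECF_LEAF, 0 };  */
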
/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

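/* For instance (an AArch64-style example, purely illustrative): for an
   ARRAY_TYPE holding two V4SI vectors, IMODE is the 256-bit integer
   mode of the whole array (OImode) and VMODE is V4SImode, so with
   OPTAB == vec_load_lanes_optab the lookup yields the target's
   vec_load_lanesoiv4si pattern.  */
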
/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

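/* Illustrative only: after vectorization for a load-lanes target, a
   GIMPLE call such as

       vect_array.7 = LOAD_LANES (MEM[(int *)p_5]);

   reaches this expander; the array-of-vectors LHS becomes output
   operand 0 and the re-moded memory the fixed operand 1 above.  */
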
/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

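/* Illustrative only: the store direction is the mirror image,

       MEM[(int *)p_5] = STORE_LANES (vect_array.7);

   so here the memory is the fixed operand and the array of vectors is
   the input register operand.  */
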
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
	 unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump instead of 2 at runtime, 3 present in the
	 emitted code.  If one of the arguments is CONST_INT, all we
	 need is to make sure it is op1, then the first
	 emit_cmp_and_jump_insns will be just folded.  Otherwise try
	 to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  wide_int arg0_min, arg0_max;
	  if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 1;
	      else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 2;
	    }
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	{
	  wide_int arg1_min, arg1_max;
	  if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 1;
	      else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 2;
	    }
	}

      /* If the op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
				 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

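/* A plain-C sketch of the generic path above for res = op0 + op1
   (illustrative only; the emitted RTL folds one of the comparisons
   away whenever a constant operand or range information fixes the
   sign of op1).  For op1 >= 0 overflow happened iff res < op0, for
   op1 < 0 iff res > op0:

     unsigned int res = (unsigned int) op0 + (unsigned int) op1;
     if (op1 < 0)
       goto sub_check;
     if ((int) res >= op0) goto done; else goto do_error;
    sub_check:
     if ((int) res <= op0) goto done; else goto do_error;  */
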
/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
	 unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

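/* Illustrative only: in two's complement the sole operand whose
   negation overflows is the most negative value; for 32-bit int,
   -(-2147483647 - 1) would be 2147483648, which does not fit.  That is
   why the fallback needs just the single comparison against
   TYPE_MIN_VALUE above.  */
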
/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
	= mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = make_tree (TREE_TYPE (arg0), op0);
      ops.op1 = make_tree (TREE_TYPE (arg0), op1);
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
				     GET_MODE_PRECISION (mode), NULL_RTX, 0);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
				      GET_MODE_PRECISION (mode) - 1,
				      NULL_RTX, 0);
	  /* RES is low half of the double width result, HIPART
	     the high half.  There was overflow if
	     HIPART is different from RES < 0 ? -1 : 0.  */
	  emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
				   false, done_label, PROB_VERY_LIKELY);
	}
      else if (hmode != BLKmode
	       && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
	{
	  rtx large_op0 = gen_label_rtx ();
	  rtx small_op0_large_op1 = gen_label_rtx ();
	  rtx one_small_one_large = gen_label_rtx ();
	  rtx both_ops_large = gen_label_rtx ();
	  rtx after_hipart_neg = gen_label_rtx ();
	  rtx after_lopart_neg = gen_label_rtx ();
	  rtx do_overflow = gen_label_rtx ();
	  rtx hipart_different = gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, 0);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				       NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, 0);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				       NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (TREE_CODE (arg0) == SSA_NAME)
	    {
	      wide_int arg0_min, arg0_max;
	      if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op0_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op0_medium_p = true;
		  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = 0;
		  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = -1;
		}
	    }
	  if (TREE_CODE (arg1) == SSA_NAME)
	    {
	      wide_int arg1_min, arg1_max;
	      if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op1_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op1_medium_p = true;
		  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = 0;
		  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = -1;
		}
	    }

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
				     false, large_op0, PROB_UNLIKELY);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign extended from hmode to mode,
	     the multiplication will never overflow.  We can do just one
	     hmode x hmode => mode widening multiplication.  */
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart0) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart1) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, 0);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign extended from hmode to mode, but op1 is not,
	     just swap the arguments and handle it as op1 sign extended,
	     op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, both_ops_large, PROB_UNLIKELY);

	  /* If op1 is sign extended from hmode to mode, but op0 is not,
	     prepare larger, hipart and lopart pseudos and handle it together
	     with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the high part
	     of larger and lopart0 and lopart1 are the low parts of both
	     operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	  if (larger_sign == 0)
	    emit_jump (after_hipart_neg);
	  else if (larger_sign != -1)
	    emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_hipart_neg, PROB_EVEN);

	  tem = convert_modes (mode, hmode, lopart, 1);
	  tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_hipart_neg);

	  /* if (lopart < 0) loxhi -= larger;  */
	  if (smaller_sign == 0)
	    emit_jump (after_lopart_neg);
	  else if (smaller_sign != -1)
	    emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_lopart_neg, PROB_EVEN);

	  tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_lopart_neg);

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
		 == (hmode) loxhi >> (bitsize / 2 - 1))  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx lopartloxhi = gen_lowpart (hmode, loxhi);
	  rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
					   hprec - 1, NULL_RTX, 0);

	  emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
				   hmode, false, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign extended from hmode),
	     then perform the full multiplication which will be the result
	     of the operation.  The only cases which don't overflow are
	     some cases where both hipart0 and hipart1 are 0 or -1.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!op0_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  if (!op1_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  /* At this point hipart{0,1} are both in [-1, 0].  If they are the
	     same, overflow happened if res is negative, if they are
	     different, overflow happened if res is positive.  */
	  if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
	    emit_jump (hipart_different);
	  else if (op0_sign == 1 || op1_sign == 1)
	    emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
				     true, hipart_different, PROB_EVEN);

	  emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (hipart_different);

	  emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

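/* An illustrative C equivalent of the double-width path above, assuming
   32-bit int and a supported 64-bit mode (not the actual emitted RTL):

     long long w = (long long) op0 * op1;
     int res = (int) w;
     int hipart = (int) (w >> 32);
     if (hipart == res >> 31)
       goto done;
     goto do_error;

   The product fits iff the high half is a pure sign copy of the low
   half; the longer half-mode path rebuilds the same condition from
   hmode-sized pieces when no double-width mode exists.  */
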
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

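/* Illustrative only, assuming the usual ubsan lowering: negation -x is
   instrumented as a subtraction from zero, so a literal zero first
   operand identifies negation and is routed to the cheaper
   single-comparison negate check.  */
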
/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

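/* Illustrative only: if-conversion produces calls such as

       vect__6.8_11 = MASK_LOAD (vectp.5_8, 0B, vect_mask.7_9);

   for conditional loads in a vectorized loop; lanes whose mask bit is
   clear must not be accessed at all, which is why this maps onto the
   target's maskload pattern rather than an ordinary move.  */
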
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

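/* Illustrative only: the store form carries the value to store as the
   fourth argument,

       MASK_STORE (vectp.5_8, 0B, vect_mask.7_9, vect__6.8_11);

   hence the gimple_call_arg (stmt, 3) above.  */
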
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
			 EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

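/* Illustrative only: at -O0, or after errors, __builtin_expect calls
   survive to expansion; e.g. for

       long r = __builtin_expect (x, 1);

   the argument must still be evaluated and forwarded to R even though
   the hint itself is useless by now, and that is all this expander
   does.  */
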
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}

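/* Illustrative only: the same internal-fn.def X macro drives this
   table, so dispatch is one indexed indirect call; a statement
   _1 = UBSAN_CHECK_ADD (a_2, b_3) reaches expand_UBSAN_CHECK_ADD via
   internal_fn_expanders[(int) IFN_UBSAN_CHECK_ADD].  */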