/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
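
/* For illustration only: internal-fn.def contains entries such as
   DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF, NULL), so the
   definition above contributes the string "LOAD_LANES" at index
   IFN_LOAD_LANES.  See internal-fn.def for the authoritative list.  */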

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
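
/* Illustrative example (GIMPLE dump syntax approximate): the vectorizer
   emits calls such as
       vect_array.5 = LOAD_LANES (MEM[(int *)&a]);
   which the expander above maps onto the target's vec_load_lanes optab,
   e.g. a single ld3/vld3-style instruction on targets that provide it.  */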

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gcall *stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

static void
expand_ANNOTATE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_OBJECT_SIZE (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gcall *stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump at runtime instead of two (three are present
         in the emitted code).  If one of the arguments is CONST_INT, all
         we need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
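      /* Worked example, for illustration: in a hypothetical 8-bit mode,
         100 + 50 wraps to -106.  Since op1 = 50 is nonnegative, overflow
         happened iff res < op0 under a signed comparison: -106 < 100, so
         the jump to done_label below is not taken and the error is
         reported.  The dual check after sub_check handles negative op1,
         where res <= op0 must hold instead.  */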
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gcall *stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
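      /* Illustration: in a hypothetical 8-bit mode the only negation
         that overflows is -(-128), because +128 is not representable;
         every other operand jumps straight to done_label.  */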
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gcall *stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = make_tree (TREE_TYPE (arg0), op0);
      ops.op1 = make_tree (TREE_TYPE (arg0), op1);
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          /* The multiplication can be done in a twice wider mode: do it
             there and inspect the high half of the result.  */
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is the low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
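          /* Illustration: in a hypothetical 8-bit mode, 16 * 16 = 256 is
             0x0100 as a 16-bit widened result, so RES = 0x00 (sign bit
             clear) while HIPART = 0x01; the comparison below then falls
             through to the error path instead of jumping to done_label.  */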
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx_code_label *large_op0 = gen_label_rtx ();
          rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
          rtx_code_label *one_small_one_large = gen_label_rtx ();
          rtx_code_label *both_ops_large = gen_label_rtx ();
          rtx_code_label *after_hipart_neg = gen_label_rtx ();
          rtx_code_label *after_lopart_neg = gen_label_rtx ();
          rtx_code_label *do_overflow = gen_label_rtx ();
          rtx_code_label *hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halves.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
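          /* Rough sketch, for illustration: with S the sign-extended
             small operand and larger = hipart:lopartN, the product is
             S * larger = ((S * hipart) << hprec) + S * lopartN.  The two
             unsigned half-width products below compute this, with the
             corrections for negative hipart and negative S applied in
             the steps that follow.  */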
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
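
          /* The (hipart + 1) >u 1 idiom below tests hipart not-in {-1, 0}
             with a single unsigned comparison: hipart = 1 gives 2 >u 1
             (overflow), while hipart = -1 wraps to 0 >u 1 (in range).  */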
          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is nonnegative.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fall through into
             do_error.  */
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          /* Neither a twice wider mode nor a mode of exactly half the
             precision is available: expand the plain product without
             instrumenting an overflow check.  */
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gcall *stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gcall *stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gcall *stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
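
/* For illustration (dump syntax approximate): with
   -fsanitize=signed-integer-overflow the ubsan instrumentation (see
   ubsan.c) replaces a signed "a + b" with
       _3 = UBSAN_CHECK_ADD (a_1, b_2);
   and the expanders above turn that back into arithmetic plus an
   overflow branch into the ubsan runtime.  */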

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gcall *stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

static void
expand_MASK_LOAD (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
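
/* Illustrative example (dump syntax approximate): for a conditional
   load in a vectorized loop the vectorizer emits something like
       vect__6.12 = MASK_LOAD (_25, 0B, mask__8.11);
   which expand_MASK_LOAD above turns into the target's maskload
   instruction (e.g. AVX2/AVX-512 masked moves).  */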

static void
expand_MASK_STORE (gcall *stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

static void
expand_ABNORMAL_DISPATCHER (gcall *)
{
}

static void
expand_BUILTIN_EXPECT (gcall *stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
                         EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gcall *stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gcall *stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}
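
/* For context: cfgexpand.c routes calls that satisfy
   gimple_call_internal_p through expand_internal_call, so adding a new
   internal function only requires a DEF_INTERNAL_FN entry and a matching
   expand_<NAME> routine above.  */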