/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
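
/* (For reference: lookups go through internal_fn_name () in internal-fn.h,
   which simply indexes this array by the function code.)  */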

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
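
/* (Illustration only, not part of the build: a hypothetical entry
   DEF_INTERNAL_FN (FOO, ECF_LEAF, ".R") in internal-fn.def would expand
   inside init_internal_fns to roughly

     if (".R") internal_fn_fnspec_array[IFN_FOO] =
       build_string ((int) sizeof (".R"), ".R");

   so only functions with a non-null fnspec string get an entry.)  */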

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
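
/* Loop annotation calls should have been consumed before expansion;
   reaching this stub indicates a bug in an earlier pass.  */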

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx_code_label *sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On the RTL level, the operation is
	 always performed unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
			  op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
	 the second operand, as subtraction is not commutative) is always
	 non-negative or always negative, we can do just one comparison
	 and conditional jump at runtime instead of two (three are present
	 in the emitted code).  If one of the arguments is a CONST_INT,
	 all we need is to make sure it is op1, so that the first
	 emit_cmp_and_jump_insns gets folded.  Otherwise try to use range
	 info if available.  */
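      /* (Example with 8-bit signed operands: if op1 is known to be
	 negative, pos_neg == 2 and the only runtime check emitted below
	 is res <= op0.  For op0 = -100, op1 = -50 the unsigned addition
	 wraps to res = 106, the check 106 <= -100 fails, and the
	 overflow is reported.)  */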
      if (code == PLUS_EXPR && CONST_INT_P (op0))
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
      else if (CONST_INT_P (op1))
	;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
	{
	  wide_int arg0_min, arg0_max;
	  if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 1;
	      else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		pos_neg = 2;
	    }
	  if (pos_neg != 3)
	    {
	      rtx tem = op0;
	      op0 = op1;
	      op1 = tem;
	    }
	}
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
	{
	  wide_int arg1_min, arg1_max;
	  if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
	    {
	      if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 1;
	      else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		pos_neg = 2;
	    }
	}

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
	emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
				 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
	{
	  emit_jump (do_error);

	  emit_label (sub_check);
	}

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
	emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
				 NULL_RTX, mode, false, done_label,
				 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On the RTL level, the negation is
	 always performed unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
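      /* (The only input whose negation overflows is the most negative
	 value itself, e.g. -2147483648 for 32-bit int, whose negation is
	 not representable; any other op1 jumps straight to done_label.)  */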
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
			       done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx_code_label *done_label, *do_error;
  rtx target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx_insn *last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
	{
	  last = get_last_insn ();
	  if (profile_status_for_fn (cfun) != PROFILE_ABSENT
	      && JUMP_P (last)
	      && any_condjump_p (last)
	      && !find_reg_note (last, REG_BR_PROB, 0))
	    add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);
	}
      else
	{
	  delete_insns_since (last);
	  icode = CODE_FOR_nothing;
	}
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
	= mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
	  && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
	{
	  enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type
	    = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

	  res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
	  rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
				     GET_MODE_PRECISION (mode), NULL_RTX, 0);
	  hipart = gen_lowpart (mode, hipart);
	  res = gen_lowpart (mode, res);
	  rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
				      GET_MODE_PRECISION (mode) - 1,
				      NULL_RTX, 0);
	  /* RES is low half of the double width result, HIPART
	     the high half.  There was overflow if
	     HIPART is different from RES < 0 ? -1 : 0.  */
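	  /* (E.g. for 8-bit operands widened to 16 bits: 100 * 2 = 200
	     = 0x00c8, so HIPART is 0 while the low half 0xc8 is negative
	     and SIGNBIT is -1; the mismatch correctly flags that 200
	     overflows the 8-bit result.)  */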
	  emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
				   false, done_label, PROB_VERY_LIKELY);
	}
      else if (hmode != BLKmode
	       && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
	{
	  rtx_code_label *large_op0 = gen_label_rtx ();
	  rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
	  rtx_code_label *one_small_one_large = gen_label_rtx ();
	  rtx_code_label *both_ops_large = gen_label_rtx ();
	  rtx_code_label *after_hipart_neg = gen_label_rtx ();
	  rtx_code_label *after_lopart_neg = gen_label_rtx ();
	  rtx_code_label *do_overflow = gen_label_rtx ();
	  rtx_code_label *hipart_different = gen_label_rtx ();

	  unsigned int hprec = GET_MODE_PRECISION (hmode);
	  rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
				      NULL_RTX, 0);
	  hipart0 = gen_lowpart (hmode, hipart0);
	  rtx lopart0 = gen_lowpart (hmode, op0);
	  rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
				       NULL_RTX, 0);
	  rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
				      NULL_RTX, 0);
	  hipart1 = gen_lowpart (hmode, hipart1);
	  rtx lopart1 = gen_lowpart (hmode, op1);
	  rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
				       NULL_RTX, 0);

	  res = gen_reg_rtx (mode);

	  /* True if op0 resp. op1 are known to be in the range of
	     halfstype.  */
	  bool op0_small_p = false;
	  bool op1_small_p = false;
	  /* True if op0 resp. op1 are known to have all zeros or all ones
	     in the upper half of bits, but are not known to be
	     op{0,1}_small_p.  */
	  bool op0_medium_p = false;
	  bool op1_medium_p = false;
	  /* -1 if op{0,1} is known to be negative, 0 if it is known to be
	     nonnegative, 1 if unknown.  */
	  int op0_sign = 1;
	  int op1_sign = 1;

	  if (TREE_CODE (arg0) == SSA_NAME)
	    {
	      wide_int arg0_min, arg0_max;
	      if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op0_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op0_medium_p = true;
		  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = 0;
		  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
		    op0_sign = -1;
		}
	    }
	  if (TREE_CODE (arg1) == SSA_NAME)
	    {
	      wide_int arg1_min, arg1_max;
	      if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
		{
		  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
		  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
		  if (mprec0 <= hprec && mprec1 <= hprec)
		    op1_small_p = true;
		  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
		    op1_medium_p = true;
		  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = 0;
		  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
		    op1_sign = -1;
		}
	    }

	  int smaller_sign = 1;
	  int larger_sign = 1;
	  if (op0_small_p)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op1_sign;
	    }
	  else if (op1_small_p)
	    {
	      smaller_sign = op1_sign;
	      larger_sign = op0_sign;
	    }
	  else if (op0_sign == op1_sign)
	    {
	      smaller_sign = op0_sign;
	      larger_sign = op0_sign;
	    }

	  if (!op0_small_p)
	    emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
				     false, large_op0, PROB_UNLIKELY);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, small_op0_large_op1,
				     PROB_UNLIKELY);

	  /* If both op0 and op1 are sign extended from hmode to mode,
	     the multiplication will never overflow.  We can do just one
	     hmode x hmode => mode widening multiplication.  */
	  if (GET_CODE (lopart0) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart0) = 1;
	      SUBREG_PROMOTED_SET (lopart0, 0);
	    }
	  if (GET_CODE (lopart1) == SUBREG)
	    {
	      SUBREG_PROMOTED_VAR_P (lopart1) = 1;
	      SUBREG_PROMOTED_SET (lopart1, 0);
	    }
	  tree halfstype = build_nonstandard_integer_type (hprec, 0);
	  ops.op0 = make_tree (halfstype, lopart0);
	  ops.op1 = make_tree (halfstype, lopart1);
	  ops.code = WIDEN_MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  rtx thisres
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, thisres);
	  emit_jump (done_label);

	  emit_label (small_op0_large_op1);

	  /* If op0 is sign extended from hmode to mode, but op1 is not,
	     just swap the arguments and handle it as op1 sign extended,
	     op0 not.  */
	  rtx larger = gen_reg_rtx (mode);
	  rtx hipart = gen_reg_rtx (hmode);
	  rtx lopart = gen_reg_rtx (hmode);
	  emit_move_insn (larger, op1);
	  emit_move_insn (hipart, hipart1);
	  emit_move_insn (lopart, lopart0);
	  emit_jump (one_small_one_large);

	  emit_label (large_op0);

	  if (!op1_small_p)
	    emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
				     false, both_ops_large, PROB_UNLIKELY);

	  /* If op1 is sign extended from hmode to mode, but op0 is not,
	     prepare larger, hipart and lopart pseudos and handle it together
	     with small_op0_large_op1.  */
	  emit_move_insn (larger, op0);
	  emit_move_insn (hipart, hipart0);
	  emit_move_insn (lopart, lopart1);

	  emit_label (one_small_one_large);

	  /* lopart is the low part of the operand that is sign extended
	     to mode, larger is the other operand, hipart is the
	     high part of larger and lopart0 and lopart1 are the low parts
	     of both operands.
	     We perform lopart0 * lopart1 and lopart * hipart widening
	     multiplications.  */
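	  /* (Equivalently: writing the sign-extended operand as S and the
	     larger one as HIPART * 2^hprec + LO, the signed product is
	     S * LO + ((S * HIPART) << hprec).  The two widening
	     multiplications below compute these terms unsigned, and the
	     conditional subtractions that follow correct for negative
	     HIPART and S.)  */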
	  tree halfutype = build_nonstandard_integer_type (hprec, 1);
	  ops.op0 = make_tree (halfutype, lopart0);
	  ops.op1 = make_tree (halfutype, lopart1);
	  rtx lo0xlo1
	    = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

	  ops.op0 = make_tree (halfutype, lopart);
	  ops.op1 = make_tree (halfutype, hipart);
	  rtx loxhi = gen_reg_rtx (mode);
	  rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (loxhi, tem);

	  /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
	  if (larger_sign == 0)
	    emit_jump (after_hipart_neg);
	  else if (larger_sign != -1)
	    emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_hipart_neg, PROB_EVEN);

	  tem = convert_modes (mode, hmode, lopart, 1);
	  tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_hipart_neg);

	  /* if (lopart < 0) loxhi -= larger;  */
	  if (smaller_sign == 0)
	    emit_jump (after_lopart_neg);
	  else if (smaller_sign != -1)
	    emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
				     false, after_lopart_neg, PROB_EVEN);

	  tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  emit_label (after_lopart_neg);

	  /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
	  tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
	  tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
				     1, OPTAB_DIRECT);
	  emit_move_insn (loxhi, tem);

	  /* if (loxhi >> (bitsize / 2)
	     == (hmode) loxhi >> (bitsize / 2 - 1))  */
	  rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
					  NULL_RTX, 0);
	  hipartloxhi = gen_lowpart (hmode, hipartloxhi);
	  rtx lopartloxhi = gen_lowpart (hmode, loxhi);
	  rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
					   hprec - 1, NULL_RTX, 0);

	  emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
				   hmode, false, do_overflow,
				   PROB_VERY_UNLIKELY);

	  /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
	  rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
					   NULL_RTX, 1);
	  tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

	  tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
				     1, OPTAB_DIRECT);
	  if (tem != res)
	    emit_move_insn (res, tem);
	  emit_jump (done_label);

	  emit_label (both_ops_large);

	  /* If both operands are large (not sign extended from hmode),
	     then perform the full multiplication which will be the result
	     of the operation.  The only cases which don't overflow are
	     some cases where both hipart0 and hipart1 are 0 or -1.  */
	  ops.code = MULT_EXPR;
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);

	  if (!op0_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  if (!op1_medium_p)
	    {
	      tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
					 NULL_RTX, 1, OPTAB_DIRECT);
	      emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
				       true, do_error, PROB_VERY_UNLIKELY);
	    }

	  /* At this point hipart{0,1} are both in [-1, 0].  If they are
	     the same, overflow happened if res is negative; if they are
	     different, overflow happened if res is non-negative.  */
	  if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
	    emit_jump (hipart_different);
	  else if (op0_sign == 1 || op1_sign == 1)
	    emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
				     true, hipart_different, PROB_EVEN);

	  emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (hipart_different);

	  emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
				   do_error, PROB_VERY_UNLIKELY);
	  emit_jump (done_label);

	  emit_label (do_overflow);

	  /* Overflow, do full multiplication and fallthru into do_error.  */
	  ops.op0 = make_tree (TREE_TYPE (arg0), op0);
	  ops.op1 = make_tree (TREE_TYPE (arg0), op1);
	  tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_move_insn (res, tem);
	}
      else
	{
	  ops.code = MULT_EXPR;
	  ops.type = TREE_TYPE (arg0);
	  res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
	  emit_jump (done_label);
	}
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
				     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}
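
/* (For context: the ubsan pass emits these checks as internal calls,
   printed in GIMPLE dumps as e.g. "_5 = UBSAN_CHECK_ADD (a_1, b_2);",
   which the expanders above turn into the checked RTL sequences.)  */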

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
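
/* Expand MASK_LOAD call STMT.  */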
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
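
/* Expand MASK_STORE call STMT.  */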
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
		     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
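
/* ABNORMAL_DISPATCHER only marks the dispatcher block for abnormal
   control flow in the CFG; its expansion intentionally emits no code.  */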
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}
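
/* Expand BUILTIN_EXPECT call STMT.  */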
static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
			 EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}