/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
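
/* ANNOTATE carries loop annotations (such as #pragma GCC ivdep) and is
   expected to be consumed before expansion; reaching this point
   indicates a bug.  */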
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
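      /* pos_neg describes what is known about op1's sign after any
         operand swapping below: 1 = known non-negative, 2 = known
         negative, 3 = unknown.  */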
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one runtime
         comparison and conditional jump instead of two (three appear in
         the emitted code).  If one of the arguments is a CONST_INT, all
         we need is to make sure it is op1; then the first
         emit_cmp_and_jump_insns will be folded away.  Otherwise try to
         use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If op1 may be negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
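      /* For op1 known non-negative: signed a + b overflows iff the
         result is less than a, and a - b iff the result is greater
         than a, so a single signed comparison with op0 suffices; the
         mirrored test for negative op1 is emitted at sub_check.  */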
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
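      /* In two's complement the only input whose negation overflows is
         TYPE_MIN_VALUE, since its negation is not representable;
         every other value jumps straight to done_label.  */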
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
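      /* If a mode twice as wide is available, multiply in that mode and
         check that the high half of the result is the sign extension of
         the low half; otherwise fall back to splitting the operands
         into hmode halves below.  */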
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is the low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
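          /* No wider mode: split each operand into hmode halves and
             classify the halves; whenever at least one operand is sign
             extended from hmode, a single hmode x hmode widening
             multiply plus fixups suffices.  */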
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }
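
          /* Sign of the operand known to fit into hmode (smaller_sign)
             and of the other operand (larger_sign); 1 means unknown.  */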
          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is non-negative.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
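
/* Expand MASK_LOAD call STMT: conditionally load, under the vector
   mask given by argument 2, from the memory reference formed by
   arguments 0 and 1 into the lhs, via the target's maskload optab.  */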
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
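
/* Expand MASK_STORE call STMT: conditionally store argument 3, under
   the vector mask given by argument 2, to the memory reference formed
   by arguments 0 and 1, via the target's maskstore optab.  */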
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
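
/* ABNORMAL_DISPATCHER exists only to give abnormal control flow an
   explicit representation in the CFG; it expands to no code.  */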
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}
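
/* Expand BUILTIN_EXPECT: any branch probability hints have been used
   by this point, so simply return the first argument.  */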
static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
                         EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to an internal function.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}