/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "internal-fn.h"
#include "tree.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
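
/* For illustration: on a target with NEON-style multi-register loads,
   ARRAY_TYPE might be an array of two V8QI vectors; IMODE is then the
   16-byte integer mode covering the whole array and VMODE is V8QImode,
   and the optab handler maps that (IMODE, VMODE) pair to the vld2-like
   instruction.  (The example modes are illustrative, not taken from any
   particular target.)  */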

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));
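  /* The MEM now carries the mode of the whole array, so the single
     load-lanes insn below reads every vector at once.  */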

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
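  /* The addv4/subv4 patterns take four operands: the result, the two
     inputs, and a label that the insn jumps to on signed overflow.
     Wiring DO_ERROR in as that label makes the pattern branch straight
     to the error path.  */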
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On the RTL level, the arithmetic is
         always performed unsigned, so wraparound is well defined.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
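
      /* A worked example of the checks below for addition, in 8 bits:
         100 + 100 wraps around to -56.  With op1 known nonnegative the
         sum must satisfy res >= op0, and -56 >= 100 fails, so overflow
         is detected; with op1 negative the condition is res <= op0.
         For subtraction the senses of the comparisons are swapped.  */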

      /* If we can prove one of the arguments is always non-negative
         or always negative, we can do just one comparison and
         conditional jump at runtime instead of two (three are present
         in the emitted code).  If one of the arguments is CONST_INT,
         all we need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
      if (CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (TREE_CODE (arg0) == SSA_NAME)
        {
          double_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!arg0_min.is_negative ())
                pos_neg = 1;
              else if (arg0_max.is_negative ())
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          double_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!arg1_min.is_negative ())
                pos_neg = 1;
              else if (arg1_max.is_negative ())
                pos_neg = 2;
            }
        }
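
      /* POS_NEG now encodes what is known about op1's sign: 1 means
         known nonnegative, 2 means known negative, 3 means unknown,
         in which case both comparisons below are emitted and a runtime
         sign test picks between them.  */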

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On the RTL level, the negation is
         always performed unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
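
      /* In two's complement the only value whose negation overflows is
         the most negative one: e.g. in 8 bits, -(-128) is not
         representable.  A single equality test is therefore a complete
         check.  */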

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
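
      /* Three fallback strategies follow: widen to the double-width
         mode and check that the high half is a sign copy of the low
         half; failing that, decompose the operands into hmode halves
         and reconstruct the product from half-width multiplications;
         as a last resort, just multiply without a runtime check.  */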
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
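      /* E.g. for 32-bit operands this computes the exact 64-bit
         product and accepts it iff bits 32..63 all equal bit 31:
         7 * 6 = 42 has a zero high word, while 0x10000 * 0x10000
         yields high word 1, which fails the test.  */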
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              double_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  if (arg0_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg0_min))
                    op0_small_p = true;
                  else if (arg0_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg0_min))
                    op0_medium_p = true;
                  if (!arg0_min.is_negative ())
                    op0_sign = 0;
                  else if (arg0_max.is_negative ())
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              double_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  if (arg1_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg1_min))
                    op1_small_p = true;
                  else if (arg1_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg1_min))
                    op1_medium_p = true;
                  if (!arg1_min.is_negative ())
                    op1_sign = 0;
                  else if (arg1_max.is_negative ())
                    op1_sign = -1;
                }
            }
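
          /* With hprec == 32, "small" means the value is known to lie
             in [-0x80000000, 0x7fffffff], i.e. it is its own sign
             extension from the low half; "medium" means it is known to
             lie in [-0x100000000, 0xffffffff], i.e. the high half is
             all zeros or all ones but may disagree with the low half's
             sign bit.  */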

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
            }
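          /* Marking the subregs as sign-promoted records that the full
             registers already hold sign-extended values, so the
             widening multiply expansion below can skip emitting the
             explicit extensions.  */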
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);
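
          /* After the corrections above, loxhi equals the arithmetic
             high half of the product, floor (op0 * op1 / 2^hprec).  The
             product fits in mode exactly when that value fits in signed
             hmode, which is what the comparison below verifies.  */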

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }
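
          /* hipart + 1 is unsigned <= 1 only for hipart of -1 or 0, so
             the two checks above send any operand whose high half is
             neither all zeros nor all ones straight to do_error.  */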

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
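          /* Last resort: the target provides neither a double-width
             mode nor a usable half-width mode, so just compute the
             product; no runtime overflow check is emitted on this
             path.  */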
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}
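
/* The instrumentation represents unary negation as 0 - x, which is why
   a UBSAN_CHECK_SUB whose first argument is literal zero is routed to
   the dedicated negation check above.  */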

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));
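  /* Arguments 0 and 1 are the two MEM_REF operands: the base pointer
     and a constant offset whose pointer type carries the alias and
     alignment information; argument 2 is the mask vector.  */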

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call. */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}