/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}
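/* For illustration, a hypothetical internal-fn.def entry such as

     DEF_INTERNAL_FN (UBSAN_CHECK_ADD, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)

   (the flags shown here are illustrative, not a quote of the real entry)
   would expand above to the string "UBSAN_CHECK_ADD" in
   internal_fn_name_array, to its ECF_* flags in internal_fn_flags_array,
   and, because its fnspec is NULL, would leave
   internal_fn_fnspec_array[IFN_UBSAN_CHECK_ADD] untouched.  */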
/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
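/* For example, on targets with load/store-lanes support (such as ARM and
   AArch64 with their ld2/st2 family), ARRAY_TYPE might be an array of two
   V4SI vectors; IMODE is then the mode of the whole two-vector block,
   VMODE is V4SImode, and the returned pattern moves the block between
   memory and two vector registers in one go.  */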
/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}
/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}
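/* Both expanders above handle calls that the vectorizer creates for
   interleaved memory accesses.  Schematically (a sketch of the shape, not
   exact gimple dump syntax):

     vect_array_1 = LOAD_LANES (MEM[ptr]);   <-- lhs in registers, arg in memory
     MEM[ptr] = STORE_LANES (vect_array_1);  <-- lhs in memory, arg in registers

   which matches the target/mem vs. target/reg operand roles used above.  */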
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
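  /* The ops[] layout above follows the addv4/subv4 (and, below, mulv4 and
     negv3) optab convention: operand 0 is the result, the middle operands
     are the inputs, and the last operand is a code label the insn jumps to
     when the signed operation overflows.  On targets that provide these
     patterns, one insn sequence thus both computes the result and branches
     to DO_ERROR on overflow.  */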
  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On the RTL level, the arithmetic is
         always unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump at runtime instead of 2 (3 are present in
         the emitted code).  If one of the arguments is a CONST_INT, all
         we need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
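      /* Concretely, for 8-bit signed addition with op1 known nonnegative
         (pos_neg == 1), overflow happened iff the wrapped result is
         (signed) less than op0: 100 + 50 wraps to -106 and -106 < 100
         flags the overflow, while 100 + 20 = 120 >= 100 branches straight
         to done_label.  */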
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If op1 might be negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On the RTL level, the negation is
         always unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
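      /* Only the most negative value can overflow signed negation: for
         32-bit int, -INT_MIN = 2147483648 is not representable, while
         every other value negates cleanly, hence the single comparison
         against TYPE_MIN_VALUE above.  */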
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
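          /* For example, with 32-bit operands and a 64-bit WMODE:
             65536 * 65536 gives the 64-bit value 0x100000000, whose low
             half RES is 0 and high half HIPART is 1; the sign bit of RES
             is 0 and 1 != 0, so the branch to done_label is not taken and
             the overflow is reported.  For 3 * 4 = 12 both HIPART and the
             sign bit are 0 and the product is accepted.  */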
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);
          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;
          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }
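          /* With 32-bit MODE and hprec == 16, the classification above
             makes a value known to lie in [-32768, 32767] "small" (it fits
             in the signed half mode) and a value known to lie in
             [-65536, 65535] but not small "medium" (its upper 16 bits are
             all zeros or all ones, i.e. it needs at most hprec + 1 signed
             bits).  */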
          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);
          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
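          /* The algebra behind the fixups that follow: let h = hprec, let
             S be the operand that is sign extended from hmode, and write
             the other one as larger = hipart * 2^h + lopart_u (hipart
             signed, lopart_u unsigned).  With u(x) = x mod 2^h, one can
             check that

               floor (S * larger / 2^h) = u(S) * hipart
                                          - (S < 0 ? larger : 0)
                                          + floor (u(S) * lopart_u / 2^h)

             and u(S) * hipart is in turn u(S) * u(hipart) minus
             u(S) * 2^h when hipart < 0.  That is exactly the sequence
             accumulated into loxhi below, so in the end loxhi holds bits
             [h, 2h) of the full product plus its sign.  */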
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);
          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);
          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }
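          /* The PLUS/GTU trick above folds "is the high part neither 0 nor
             -1" into a single unsigned comparison: adding 1 maps -1 to 0
             and 0 to 1, so any other value compares above 1 unsigned and
             jumps to do_error (e.g. a high part of 5 becomes 6 >u 1, while
             -1 becomes 0 and falls through).  */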
          /* At this point hipart{0,1} are both in [-1, 0].  If they are the
             same, overflow happened if res is negative, if they are
             different, overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }
  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
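/* The two expanders above handle the MASK_LOAD and MASK_STORE calls that
   if-conversion creates for conditional vector accesses (usable e.g. with
   AVX2/AVX-512 style masked moves): as the fold_build2 calls show,
   arguments 0 and 1 are recombined into the MEM_REF being accessed,
   argument 2 is the mask vector, and for MASK_STORE argument 3 is the
   value to store.  */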
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
                         EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}
/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}