Fix dot dump bug
[official-gcc.git] / gcc / internal-fn.c
blob 68b2b66fbe793ce118944f5a99f26dc6ce2862af
/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "internal-fn.h"
#include "tree.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};
/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */
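/* (For illustration: load-lanes optabs back interleaved structure loads
   such as ARM NEON's vld2/vld3/vld4 family; which insns exist is
   entirely target-dependent.)  */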
static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */
void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
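  /* If the target has an add/subtract pattern with a built-in overflow
     check (operands 0-2 are the usual destination and sources, operand 3
     a label to jump to on overflow -- here do_error), use it directly.  */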
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
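          /* The conditional jump to do_error just generated is the
             overflow check; mark it as very unlikely taken so the
             no-overflow path is laid out as the fast path.  */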
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On the RTL level, the operation is
         always performed unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump at run time, instead of the 2 needed in the
         general case (3 present in the emitted code).  If one of the
         arguments is CONST_INT, all we need is to make sure it is op1,
         then the first emit_cmp_and_jump_insns will be just folded.
         Otherwise try to use range info if available.  */
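      /* pos_neg encodes what is known about op1's sign: 1 = known
         non-negative, 2 = known negative, 3 = unknown.  Worked example
         (8-bit arithmetic, for illustration): 100 + 29 wraps to -127;
         op1 = 29 is non-negative, so the "res >= op0" check below sees
         -127 >= 100 fail and branches to the error path.  */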
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }
      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }
      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
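      /* (E.g. in 8-bit arithmetic (-100) + (-50) wraps to 106, and
         106 <= -100 fails, so the error path is taken.)  */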
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }
  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On the RTL level, the negation is
         always performed unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
      /* Compare the operand with the most negative value.  */
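      /* (Signed negation overflows only for the most negative value:
         e.g. in 32-bit arithmetic -(-2147483648) is not representable.
         Hence this single comparison is a complete check.)  */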
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);
          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
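          /* (Worked example with 16-bit mode widened to 32 bits:
             300 * 200 = 60000; the low half wraps to -5536, whose
             replicated sign bit is -1, while the high half is 0.
             They differ, correctly flagging that 60000 does not fit
             in signed 16 bits.)  */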
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);
          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;
          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }
          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);
          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
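          /* (Sketch of the algebra: write the sign-extended operand as S
             and the other as L = H * 2^hprec + U, with H = hipart read as
             signed and U = L's low half read as unsigned; then
             S * L = ((S * H) << hprec) + S * U.  Both half-products are
             computed with unsigned widening multiplies; the conditional
             subtractions below repair the result when S or H is negative,
             leaving in loxhi the bits of S * L above the low hprec bits.)  */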
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);
          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
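          /* Adding 1 maps the permitted high-part values {-1, 0} to
             {0, 1}, so a single unsigned "> 1" comparison rejects all
             other values.  */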
          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }
          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
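          /* (E.g. with 16-bit halves: op0 = -1 gives hipart0 = -1 and
             op1 = 3 gives hipart1 = 0; the high parts differ and the
             product -3 is negative, so no overflow is reported.)  */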
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
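/* Expand MASK_LOAD call STMT: load from the memory reference built from
   call arguments 0 and 1, under control of the mask vector given by
   argument 2.  */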
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}
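/* Expand MASK_STORE call STMT: store call argument 3 to the memory
   reference built from arguments 0 and 1, under control of the mask
   vector given by argument 2.  */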
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
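/* ABNORMAL_DISPATCHER exists only as a CFG marker for abnormal edges;
   it generates no code.  */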
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}
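/* Expand BUILTIN_EXPECT call STMT.  By expansion time the hint has
   already been consumed by branch prediction, so simply return the
   value of the first argument.  */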
static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
                         EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}